954 Label rewrite, done;
955
956 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
957 LP64_ONLY(assert(rbx != bc, "register damaged"));
958
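// An aload_0 immediately followed by a resolved field access is rewritten into a single fused
// fast bytecode (_fast_iaccess_0 / _fast_aaccess_0 / _fast_faccess_0), so the common
// "load receiver, read field" pattern dispatches through one template instead of two.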
959 // get next byte
960 __ load_unsigned_byte(rbx, at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)));
961
962 // if the next bytecode is still _getfield (not yet rewritten to a fast variant), postpone the rewrite
963 __ cmpl(rbx, Bytecodes::_getfield);
964 __ jcc(Assembler::equal, done);
965
966 // if _igetfield then rewrite to _fast_iaccess_0
967 assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
968 __ cmpl(rbx, Bytecodes::_fast_igetfield);
969 __ movl(bc, Bytecodes::_fast_iaccess_0);
970 __ jccb(Assembler::equal, rewrite);
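// (the replacement bytecode is loaded into bc before each conditional jump, so on the taken
// branch to 'rewrite' it already holds the fast bytecode to patch in)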
971
972 // if _agetfield then rewrite to _fast_aaccess_0
973 assert(Bytecodes::java_code(Bytecodes::_fast_aaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
974 assert(ValueTypesBufferMaxMemory == 0, "Such rewriting doesn't support flattened values yet");
975 __ cmpl(rbx, Bytecodes::_fast_agetfield);
976 __ movl(bc, Bytecodes::_fast_aaccess_0);
977 __ jccb(Assembler::equal, rewrite);
978
979 // if _fgetfield then rewrite to _fast_faccess_0
980 assert(Bytecodes::java_code(Bytecodes::_fast_faccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
981 __ cmpl(rbx, Bytecodes::_fast_fgetfield);
982 __ movl(bc, Bytecodes::_fast_faccess_0);
983 __ jccb(Assembler::equal, rewrite);
984
985 // else rewrite to _fast_aload_0
986 assert(Bytecodes::java_code(Bytecodes::_fast_aload_0) == Bytecodes::_aload_0, "fix bytecode definition");
987 __ movl(bc, Bytecodes::_fast_aload_0);
988
989 // rewrite
990 // bc: fast bytecode
991 __ bind(rewrite);
992 patch_bytecode(Bytecodes::_aload_0, bc, rbx, false);
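// patch_bytecode overwrites the _aload_0 in the method's bytecode stream with the fast variant
// held in bc; the 'false' argument appears to mean bc already contains the replacement bytecode,
// so it is not reloaded (an assumption about the load_bc_into_bc_reg parameter).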
993
994 __ bind(done);
1156 // Move subklass into rbx
1157 __ load_klass(rbx, rax);
1158 // Move array element superklass into rax
1159 __ movptr(rax, Address(rdi,
1160 ObjArrayKlass::element_klass_offset()));
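// aastore type check: the object being stored (subklass in rbx) must be a subtype of the
// array's element class (superklass in rax); otherwise fall through to throw
// ArrayStoreException below.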
1161
1162 // Generate subtype check. Blows rcx, rdi
1163 // Superklass in rax. Subklass in rbx.
1164 // is "rbx <: rax" ? (value subclass <: array element superclass)
1165 __ gen_subtype_check(rbx, ok_is_subtype);
1166
1167 // Come here on failure
1168 // object is at TOS
1169 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1170
1171 // Come here on success
1172 __ bind(ok_is_subtype);
1173
1174 // Get the value we will store
1175 __ movptr(rax, at_tos());
1176 if (ValueTypesBufferMaxMemory > 0) {
1177 Label is_on_heap;
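// A value still held in the thread-local value buffer must not be stored into a heap array
// directly; InterpreterRuntime::value_heap_copy is assumed to copy it to the Java heap and
// return the resulting heap oop in rax before the store below.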
1178 __ test_value_is_not_buffered(rax, rbx, is_on_heap);
1179 __ push(rdx); // save precomputed element address, and convert buffer oop to heap oop
1180 __ call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_heap_copy), rax);
1181 __ pop(rdx);
1182 __ bind(is_on_heap);
1183 }
1184 __ movl(rcx, at_tos_p1()); // index
1185 // Now store using the appropriate barrier
1186 do_oop_store(_masm, element_address, rax, IS_ARRAY);
1187 __ jmp(done);
1188
1189 // Have a NULL in rax, rdx=array, ecx=index. Store NULL at ary[idx]
1190 __ bind(is_null);
1191 __ profile_null_seen(rbx);
1192 if (EnableValhalla) {
1193 Label is_null_into_value_array_npe, store_null;
1194
1195 __ load_klass(rdi, rdx);
1196 // No way to store null in flat array
1197 __ test_flat_array_klass(rdi, rbx, is_null_into_value_array_npe);
1198
1199 // Covers storing a value into an objArray whose element_klass is specifically
1200 // a value type that could not be flattened; such arrays need the same
1201 // semantics as flat arrays, i.e. storing null must throw NPE
1202 __ movptr(rdi, Address(rdi, ObjArrayKlass::element_klass_offset()));
1203 __ test_klass_is_value(rdi, rdi, is_null_into_value_array_npe);
2197 __ decrementl(rax);
2198 }
2199 __ bind(done);
2200 } else {
2201 #ifdef _LP64
2202 ShouldNotReachHere();
2203 #else
2204 if (is_float) {
2205 __ fld_s(at_rsp());
2206 } else {
2207 __ fld_d(at_rsp());
2208 __ pop(rdx);
2209 }
2210 __ pop(rcx);
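// fcmp2int leaves the three-way comparison result (-1, 0, +1) in rax; the second argument
// selects whether an unordered comparison (a NaN operand) yields -1 (fcmpl/dcmpl semantics)
// or +1 (fcmpg/dcmpg semantics).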
2211 __ fcmp2int(rax, unordered_result < 0);
2212 #endif // _LP64
2213 }
2214 }
2215
2216 void TemplateTable::branch(bool is_jsr, bool is_wide) {
2217 if (ValueTypesThreadLocalRecycling) {
2218 Label no_vt_recycling, no_fixing_required;
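// At a taken branch (a likely loop back-edge), compare the thread's current value-type buffer
// allocation pointer with the one recorded at frame entry; if this frame has consumed more
// buffer chunks than the method's limit allows, call the runtime to recycle the buffered
// values. This summary is inferred from the chunk-index arithmetic below.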
2219 const Register thread1 = NOT_LP64(rbx) LP64_ONLY(r15_thread);
2220 NOT_LP64(__ get_thread(thread1));
2221 __ movptr(rbx, Address(thread1, in_bytes(JavaThread::vt_alloc_ptr_offset())));
2222 __ testptr(rbx, rbx);
2223 __ jcc(Assembler::zero, no_vt_recycling);
2224 __ movptr(rcx, Address(rbp, frame::interpreter_frame_vt_alloc_ptr_offset * wordSize));
2225 __ testptr(rcx, rcx);
2226 __ jcc(Assembler::notZero, no_fixing_required);
2227 // vt_alloc_ptr in JavaThread is non-null but frame vt_alloc_ptr is null
2228 // which means frame vt_alloc_ptr needs to be initialized
2229 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::fix_frame_vt_alloc_ptr));
2230 __ movptr(rcx, Address(rbp, frame::interpreter_frame_vt_alloc_ptr_offset * wordSize));
2231 __ bind(no_fixing_required);
2232 __ testptr(rcx, rbx);
2233 __ jcc(Assembler::equal, no_vt_recycling);
2234 __ andptr(rcx, VTBufferChunk::chunk_mask());
2235 __ movl(rcx, Address(rcx, VTBufferChunk::index_offset()));
2236 __ andptr(rbx, VTBufferChunk::chunk_mask());
2237 __ movl(rbx, Address(rbx, VTBufferChunk::index_offset()));
2238 __ subl(rbx, rcx);
2239 __ get_method(rcx);
2240 __ movl(rcx, Address(rcx, Method::max_vt_buffer_offset()));
2241 __ cmpl(rbx, rcx);
2242 __ jcc(Assembler::lessEqual, no_vt_recycling);
2243 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::recycle_buffered_values));
2244 __ bind(no_vt_recycling);
2245 }
2246
2247 __ get_method(rcx); // rcx holds method
2248 __ profile_taken_branch(rax, rbx); // rax holds updated MDP, rbx
2249 // holds bumped taken count
2250
2251 const ByteSize be_offset = MethodCounters::backedge_counter_offset() +
2252 InvocationCounter::counter_offset();
2253 const ByteSize inv_offset = MethodCounters::invocation_counter_offset() +
2254 InvocationCounter::counter_offset();
2255
2256 // Load up edx with the branch displacement
2257 if (is_wide) {
2258 __ movl(rdx, at_bcp(1));
2259 } else {
2260 __ load_signed_short(rdx, at_bcp(1));
2261 }
2262 __ bswapl(rdx);
2263
2264 if (!is_wide) {
2265 __ sarl(rdx, 16);
2266 }
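// The bytecode stream is big-endian while x86 is little-endian: bswapl fixes the byte order,
// and in the non-wide case the 16-bit displacement lands in the upper half of rdx, so the
// arithmetic right shift by 16 both repositions and sign-extends it.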
2801 __ jcc(Assembler::zero, no_safepoint);
2802 __ push(state);
2803 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2804 InterpreterRuntime::at_safepoint));
2805 __ pop(state);
2806 __ bind(no_safepoint);
2807 }
2808
2809 if (EnableValhalla && state == atos) {
2810 Label not_returning_null_vt;
2811 const Register method = rbx, tmp = rdx;
2812
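// A method declared to return a value type should not return null to its callers (compiled
// callers may expect a scalarized, non-null value); if rax is null here, the caller frame is
// deoptimized so the return is handled in the interpreter. This reading of
// deoptimize_caller_frame_for_vt is an assumption.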
2813 __ testl(rax, rax);
2814 __ jcc(Assembler::notZero, not_returning_null_vt);
2815 __ get_method(method);
2816 __ load_unsigned_short(tmp, Address(rbx, Method::flags_offset()));
2817 __ testl(tmp, Method::is_returning_vt_mask());
2818 __ jcc(Assembler::zero, not_returning_null_vt);
2819 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::deoptimize_caller_frame_for_vt), method);
2820 __ bind(not_returning_null_vt);
2821
2822 if (ValueTypesBufferMaxMemory > 0) {
2823 Label notBuffered;
2824
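// If the value being returned still lives in the thread-local buffer it cannot outlive the
// frame being removed; InterpreterRuntime::return_value is assumed to relocate it, and the
// resulting oop is reloaded from the thread's vm_result below.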
2825 __ test_value_is_not_buffered(rax, rbx, notBuffered);
2826 const Register thread1 = NOT_LP64(rcx) LP64_ONLY(r15_thread);
2827 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::return_value), rax);
2828 NOT_LP64(__ get_thread(thread1));
2829 __ get_vm_result(rax, thread1);
2830 __ bind(notBuffered);
2831 }
2832 }
2833
2834 // Narrow result if state is itos but result type is smaller.
2835 // Need to narrow in the return bytecode rather than in generate_return_entry
2836 // since compiled code callers expect the result to already be narrowed.
2837 if (state == itos) {
2838 __ narrow(rax);
2839 }
2840
2841 __ remove_activation(state, rbcp, true, true, true, /*state == qtos*/ false && ValueTypeReturnedAsFields);
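// Note: the last argument always evaluates to false, i.e. the value type is not returned as
// individual fields here even when ValueTypeReturnedAsFields is set; the commented-out
// 'state == qtos' condition hints at the intended future use.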
2842
2843 __ jmp(rbcp);
2844 }
2845
2846 // ----------------------------------------------------------------------------
2847 // Volatile variables demand their effects be made known to all CPUs
2848 // in order. Store buffers on most chips allow reads & writes to
2849 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2850 // without some kind of memory barrier (i.e., it's not sufficient that
2851 // the interpreter does not reorder volatile references, the hardware
3421 __ jcc(Assembler::notEqual, notObj);
3422
3423 // atos
3424 {
3425 if (!EnableValhalla) {
3426 __ pop(atos);
3427 if (!is_static) pop_and_check_object(obj);
3428 // Store into the field
3429 do_oop_store(_masm, field, rax);
3430 if (!is_static && rc == may_rewrite) {
3431 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3432 }
3433 __ jmp(Done);
3434 } else {
3435 __ pop(atos);
3436 if (is_static) {
3437 Label notFlattenable, notBuffered;
3438 __ test_field_is_not_flattenable(flags2, rscratch1, notFlattenable);
3439 __ null_check(rax);
3440 __ bind(notFlattenable);
3441 if (ValueTypesBufferMaxMemory > 0) {
3442 __ test_value_is_not_buffered(rax, rscratch1, notBuffered);
3443 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_heap_copy),
3444 rax, off, obj);
3445 __ jmp(Done);
3446 __ bind(notBuffered);
3447 }
3448 do_oop_store(_masm, field, rax);
3449 __ jmp(Done);
3450 } else {
3451 Label isFlattenable, isFlattened, notBuffered, notBuffered2, rewriteNotFlattenable, rewriteFlattenable;
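// Dispatch on the field kind: non-flattenable fields take a normal oop store; flattenable
// fields must never be assigned null (hence the null_check below) and, when actually flattened,
// are written with InterpreterRuntime::write_flattened_value instead of an oop store.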
3452 __ test_field_is_flattenable(flags2, rscratch1, isFlattenable);
3453 // Not flattenable case, covers non-flattenable values and objects
3454 pop_and_check_object(obj);
3455 // Store into the field
3456 if (ValueTypesBufferMaxMemory > 0) {
3457 __ test_value_is_not_buffered(rax, rscratch1, notBuffered);
3458 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_heap_copy),
3459 rax, off, obj);
3460 __ jmp(rewriteNotFlattenable);
3461 __ bind(notBuffered);
3462 }
3463 do_oop_store(_masm, field, rax);
3464 __ bind(rewriteNotFlattenable);
3465 if (rc == may_rewrite) {
3466 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3467 }
3468 __ jmp(Done);
3469 // Implementation of the flattenable semantic
3470 __ bind(isFlattenable);
3471 __ null_check(rax);
3472 __ test_field_is_flattened(flags2, rscratch1, isFlattened);
3473 // Not flattened case
3474 if (ValueTypesBufferMaxMemory > 0) {
3475 __ test_value_is_not_buffered(rax, rscratch1, notBuffered2);
3476 pop_and_check_object(obj);
3477 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_heap_copy),
3478 rax, off, obj);
3479 __ jmp(rewriteFlattenable);
3480 __ bind(notBuffered2);
3481 }
3482 pop_and_check_object(obj);
3483 // Store into the field
3484 do_oop_store(_masm, field, rax);
3485 __ jmp(rewriteFlattenable);
3486 __ bind(isFlattened);
3487 pop_and_check_object(obj);
3488 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value),
3489 rax, off, obj);
3490 __ bind(rewriteFlattenable);
3491 if (rc == may_rewrite) {
3492 patch_bytecode(Bytecodes::_fast_qputfield, bc, rbx, true, byte_no);
3493 }
3494 __ jmp(Done);
3495 }
3496 }
3497 }
3498
3499 __ bind(notObj);
3500 __ cmpl(flags, itos);
3501 __ jcc(Assembler::notEqual, notInt);
3730 // Assembler::StoreStore));
3731
3732 if (bytecode() == Bytecodes::_fast_qputfield) {
3733 __ movl(rscratch2, rdx);
3734 }
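// rdx holds the constant pool cache entry flags; for _fast_qputfield they were saved in
// rscratch2 above because the volatile-bit extraction below clobbers rdx and the
// flattened-field test later still needs the original flags.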
3735
3736 Label notVolatile;
3737 __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3738 __ andl(rdx, 0x1);
3739
3740 // Get object from stack
3741 pop_and_check_object(rcx);
3742
3743 // field address
3744 const Address field(rcx, rbx, Address::times_1);
3745
3746 // access field
3747 switch (bytecode()) {
3748 case Bytecodes::_fast_qputfield:
3749 {
3750 Label isFlattened, notBuffered, done;
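// A flattenable (Q-typed) field is null-free, so storing null must throw a NullPointerException
// before any store is attempted (mirroring the putfield handling of flattenable fields).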
3751 __ null_check(rax);
3752 __ test_field_is_flattened(rscratch2, rscratch1, isFlattened);
3753 // Not flattened case
3754 if (ValueTypesBufferMaxMemory > 0) {
3755 __ test_value_is_not_buffered(rax, rscratch1, notBuffered);
3756 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_heap_copy),
3757 rax, rbx, rcx);
3758 __ jmp(done);
3759 __ bind(notBuffered);
3760 }
3761 do_oop_store(_masm, field, rax);
3762 __ jmp(done);
3763 __ bind(isFlattened);
3764 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value),
3765 rax, rbx, rcx);
3766 __ bind(done);
3767 }
3768 break;
3769 case Bytecodes::_fast_aputfield:
3770 {
3771 Label notBuffered, done;
3772 if (ValueTypesBufferMaxMemory > 0) {
3773 __ test_value_is_not_buffered(rax, rscratch1, notBuffered);
3774 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_heap_copy),
3775 rax, rbx, rcx);
3776 __ jmp(done);
3777 __ bind(notBuffered);
3778 }
3779 do_oop_store(_masm, field, rax);
3780 __ bind(done);
3781 }
3782 break;
3783 case Bytecodes::_fast_lputfield:
3784 #ifdef _LP64
3785 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3786 #else
3787 __ stop("should not be rewritten");
3788 #endif
3789 break;
3790 case Bytecodes::_fast_iputfield:
3791 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3792 break;
3793 case Bytecodes::_fast_zputfield:
3794 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3795 break;
3796 case Bytecodes::_fast_bputfield:
3797 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3798 break;
3799 case Bytecodes::_fast_sputfield:
3800 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
954 Label rewrite, done;
955
956 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
957 LP64_ONLY(assert(rbx != bc, "register damaged"));
958
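// An aload_0 immediately followed by a resolved field access is rewritten into a single fused
// fast bytecode (_fast_iaccess_0 / _fast_aaccess_0 / _fast_faccess_0), so the common
// "load receiver, read field" pattern dispatches through one template instead of two.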
959 // get next byte
960 __ load_unsigned_byte(rbx, at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)));
961
962 // if the next bytecode is still _getfield (not yet rewritten to a fast variant), postpone the rewrite
963 __ cmpl(rbx, Bytecodes::_getfield);
964 __ jcc(Assembler::equal, done);
965
966 // if _igetfield then rewrite to _fast_iaccess_0
967 assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
968 __ cmpl(rbx, Bytecodes::_fast_igetfield);
969 __ movl(bc, Bytecodes::_fast_iaccess_0);
970 __ jccb(Assembler::equal, rewrite);
971
972 // if _agetfield then rewrite to _fast_aaccess_0
973 assert(Bytecodes::java_code(Bytecodes::_fast_aaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
974 __ cmpl(rbx, Bytecodes::_fast_agetfield);
975 __ movl(bc, Bytecodes::_fast_aaccess_0);
976 __ jccb(Assembler::equal, rewrite);
977
978 // if _fgetfield then rewrite to _fast_faccess_0
979 assert(Bytecodes::java_code(Bytecodes::_fast_faccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
980 __ cmpl(rbx, Bytecodes::_fast_fgetfield);
981 __ movl(bc, Bytecodes::_fast_faccess_0);
982 __ jccb(Assembler::equal, rewrite);
983
984 // else rewrite to _fast_aload_0
985 assert(Bytecodes::java_code(Bytecodes::_fast_aload_0) == Bytecodes::_aload_0, "fix bytecode definition");
986 __ movl(bc, Bytecodes::_fast_aload_0);
987
988 // rewrite
989 // bc: fast bytecode
990 __ bind(rewrite);
991 patch_bytecode(Bytecodes::_aload_0, bc, rbx, false);
992
993 __ bind(done);
1155 // Move subklass into rbx
1156 __ load_klass(rbx, rax);
1157 // Move array element superklass into rax
1158 __ movptr(rax, Address(rdi,
1159 ObjArrayKlass::element_klass_offset()));
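// aastore type check: the object being stored (subklass in rbx) must be a subtype of the
// array's element class (superklass in rax); otherwise fall through to throw
// ArrayStoreException below.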
1160
1161 // Generate subtype check. Blows rcx, rdi
1162 // Superklass in rax. Subklass in rbx.
1163 // is "rbx <: rax" ? (value subclass <: array element superclass)
1164 __ gen_subtype_check(rbx, ok_is_subtype);
1165
1166 // Come here on failure
1167 // object is at TOS
1168 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1169
1170 // Come here on success
1171 __ bind(ok_is_subtype);
1172
1173 // Get the value we will store
1174 __ movptr(rax, at_tos());
1175 __ movl(rcx, at_tos_p1()); // index
1176 // Now store using the appropriate barrier
1177 do_oop_store(_masm, element_address, rax, IS_ARRAY);
1178 __ jmp(done);
1179
1180 // Have a NULL in rax, rdx=array, ecx=index. Store NULL at ary[idx]
1181 __ bind(is_null);
1182 __ profile_null_seen(rbx);
1183 if (EnableValhalla) {
1184 Label is_null_into_value_array_npe, store_null;
1185
1186 __ load_klass(rdi, rdx);
1187 // No way to store null in flat array
1188 __ test_flat_array_klass(rdi, rbx, is_null_into_value_array_npe);
1189
1190 // Covers storing a value into an objArray whose element_klass is specifically
1191 // a value type that could not be flattened; such arrays need the same
1192 // semantics as flat arrays, i.e. storing null must throw NPE
1193 __ movptr(rdi, Address(rdi, ObjArrayKlass::element_klass_offset()));
1194 __ test_klass_is_value(rdi, rdi, is_null_into_value_array_npe);
2188 __ decrementl(rax);
2189 }
2190 __ bind(done);
2191 } else {
2192 #ifdef _LP64
2193 ShouldNotReachHere();
2194 #else
2195 if (is_float) {
2196 __ fld_s(at_rsp());
2197 } else {
2198 __ fld_d(at_rsp());
2199 __ pop(rdx);
2200 }
2201 __ pop(rcx);
2202 __ fcmp2int(rax, unordered_result < 0);
2203 #endif // _LP64
2204 }
2205 }
2206
2207 void TemplateTable::branch(bool is_jsr, bool is_wide) {
2208 __ get_method(rcx); // rcx holds method
2209 __ profile_taken_branch(rax, rbx); // rax holds updated MDP, rbx
2210 // holds bumped taken count
2211
2212 const ByteSize be_offset = MethodCounters::backedge_counter_offset() +
2213 InvocationCounter::counter_offset();
2214 const ByteSize inv_offset = MethodCounters::invocation_counter_offset() +
2215 InvocationCounter::counter_offset();
2216
2217 // Load up edx with the branch displacement
2218 if (is_wide) {
2219 __ movl(rdx, at_bcp(1));
2220 } else {
2221 __ load_signed_short(rdx, at_bcp(1));
2222 }
2223 __ bswapl(rdx);
2224
2225 if (!is_wide) {
2226 __ sarl(rdx, 16);
2227 }
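// The bytecode stream is big-endian while x86 is little-endian: bswapl fixes the byte order,
// and in the non-wide case the 16-bit displacement lands in the upper half of rdx, so the
// arithmetic right shift by 16 both repositions and sign-extends it.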
2762 __ jcc(Assembler::zero, no_safepoint);
2763 __ push(state);
2764 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2765 InterpreterRuntime::at_safepoint));
2766 __ pop(state);
2767 __ bind(no_safepoint);
2768 }
2769
2770 if (EnableValhalla && state == atos) {
2771 Label not_returning_null_vt;
2772 const Register method = rbx, tmp = rdx;
2773
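// A method declared to return a value type should not return null to its callers (compiled
// callers may expect a scalarized, non-null value); if rax is null here, the caller frame is
// deoptimized so the return is handled in the interpreter. This reading of
// deoptimize_caller_frame_for_vt is an assumption.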
2774 __ testl(rax, rax);
2775 __ jcc(Assembler::notZero, not_returning_null_vt);
2776 __ get_method(method);
2777 __ load_unsigned_short(tmp, Address(rbx, Method::flags_offset()));
2778 __ testl(tmp, Method::is_returning_vt_mask());
2779 __ jcc(Assembler::zero, not_returning_null_vt);
2780 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::deoptimize_caller_frame_for_vt), method);
2781 __ bind(not_returning_null_vt);
2782 }
2783
2784 // Narrow result if state is itos but result type is smaller.
2785 // Need to narrow in the return bytecode rather than in generate_return_entry
2786 // since compiled code callers expect the result to already be narrowed.
2787 if (state == itos) {
2788 __ narrow(rax);
2789 }
2790
2791 __ remove_activation(state, rbcp, true, true, true, /*state == qtos*/ false && ValueTypeReturnedAsFields);
2792
2793 __ jmp(rbcp);
2794 }
2795
2796 // ----------------------------------------------------------------------------
2797 // Volatile variables demand their effects be made known to all CPUs
2798 // in order. Store buffers on most chips allow reads & writes to
2799 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2800 // without some kind of memory barrier (i.e., it's not sufficient that
2801 // the interpreter does not reorder volatile references, the hardware
3371 __ jcc(Assembler::notEqual, notObj);
3372
3373 // atos
3374 {
3375 if (!EnableValhalla) {
3376 __ pop(atos);
3377 if (!is_static) pop_and_check_object(obj);
3378 // Store into the field
3379 do_oop_store(_masm, field, rax);
3380 if (!is_static && rc == may_rewrite) {
3381 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3382 }
3383 __ jmp(Done);
3384 } else {
3385 __ pop(atos);
3386 if (is_static) {
3387 Label notFlattenable, notBuffered;
3388 __ test_field_is_not_flattenable(flags2, rscratch1, notFlattenable);
3389 __ null_check(rax);
3390 __ bind(notFlattenable);
3391 do_oop_store(_masm, field, rax);
3392 __ jmp(Done);
3393 } else {
3394 Label isFlattenable, isFlattened, notBuffered, notBuffered2, rewriteNotFlattenable, rewriteFlattenable;
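// Dispatch on the field kind: non-flattenable fields take a normal oop store; flattenable
// fields must never be assigned null (hence the null_check below) and, when actually flattened,
// are written with InterpreterRuntime::write_flattened_value instead of an oop store.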
3395 __ test_field_is_flattenable(flags2, rscratch1, isFlattenable);
3396 // Not flattenable case, covers non-flattenable values and objects
3397 pop_and_check_object(obj);
3398 // Store into the field
3399 do_oop_store(_masm, field, rax);
3400 __ bind(rewriteNotFlattenable);
3401 if (rc == may_rewrite) {
3402 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3403 }
3404 __ jmp(Done);
3405 // Implementation of the flattenable semantic
3406 __ bind(isFlattenable);
3407 __ null_check(rax);
3408 __ test_field_is_flattened(flags2, rscratch1, isFlattened);
3409 // Not flattened case
3410 pop_and_check_object(obj);
3411 // Store into the field
3412 do_oop_store(_masm, field, rax);
3413 __ jmp(rewriteFlattenable);
3414 __ bind(isFlattened);
3415 pop_and_check_object(obj);
3416 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value),
3417 rax, off, obj);
3418 __ bind(rewriteFlattenable);
3419 if (rc == may_rewrite) {
3420 patch_bytecode(Bytecodes::_fast_qputfield, bc, rbx, true, byte_no);
3421 }
3422 __ jmp(Done);
3423 }
3424 }
3425 }
3426
3427 __ bind(notObj);
3428 __ cmpl(flags, itos);
3429 __ jcc(Assembler::notEqual, notInt);
3658 // Assembler::StoreStore));
3659
3660 if (bytecode() == Bytecodes::_fast_qputfield) {
3661 __ movl(rscratch2, rdx);
3662 }
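// rdx holds the constant pool cache entry flags; for _fast_qputfield they were saved in
// rscratch2 above because the volatile-bit extraction below clobbers rdx and the
// flattened-field test later still needs the original flags.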
3663
3664 Label notVolatile;
3665 __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3666 __ andl(rdx, 0x1);
3667
3668 // Get object from stack
3669 pop_and_check_object(rcx);
3670
3671 // field address
3672 const Address field(rcx, rbx, Address::times_1);
3673
3674 // access field
3675 switch (bytecode()) {
3676 case Bytecodes::_fast_qputfield:
3677 {
3678 Label isFlattened, done;
3679 __ null_check(rax);
3680 __ test_field_is_flattened(rscratch2, rscratch1, isFlattened);
3681 // Not flattened case
3682 do_oop_store(_masm, field, rax);
3683 __ jmp(done);
3684 __ bind(isFlattened);
3685 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_flattened_value),
3686 rax, rbx, rcx);
3687 __ bind(done);
3688 }
3689 break;
3690 case Bytecodes::_fast_aputfield:
3691 {
3692 do_oop_store(_masm, field, rax);
3693 }
3694 break;
3695 case Bytecodes::_fast_lputfield:
3696 #ifdef _LP64
3697 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3698 #else
3699 __ stop("should not be rewritten");
3700 #endif
3701 break;
3702 case Bytecodes::_fast_iputfield:
3703 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3704 break;
3705 case Bytecodes::_fast_zputfield:
3706 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3707 break;
3708 case Bytecodes::_fast_bputfield:
3709 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3710 break;
3711 case Bytecodes::_fast_sputfield:
3712 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);