
src/hotspot/cpu/x86/templateTable_x86.cpp

rev 50081 : [mq]: primitives2.patch

*** 765,822 ****
  void TemplateTable::iaload() {
    transition(itos, itos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ resolve_for_read(OOP_NOT_NULL, rdx);
!   __ movl(rax, Address(rdx, rax, Address::times_4,
!                        arrayOopDesc::base_offset_in_bytes(T_INT)));
  }

  void TemplateTable::laload() {
    transition(itos, ltos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
    NOT_LP64(__ mov(rbx, rax)); // rbx,: index
!   __ resolve_for_read(OOP_NOT_NULL, rdx);
!   __ movptr(rax, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize));
!   NOT_LP64(__ movl(rdx, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)));
  }

  void TemplateTable::faload() {
    transition(itos, ftos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ resolve_for_read(OOP_NOT_NULL, rdx);
!   __ load_float(Address(rdx, rax, Address::times_4,
!                         arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
  }

  void TemplateTable::daload() {
    transition(itos, dtos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ resolve_for_read(OOP_NOT_NULL, rdx);
!   __ load_double(Address(rdx, rax, Address::times_8,
!                          arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
  }

  void TemplateTable::aaload() {
    transition(itos, atos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
-   __ resolve_for_read(OOP_NOT_NULL, rdx);
    do_oop_load(_masm,
                Address(rdx, rax,
                        UseCompressedOops ? Address::times_4 : Address::times_ptr,
                        arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
                rax,
--- 765,824 ----
  void TemplateTable::iaload() {
    transition(itos, itos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ access_load_at(T_INT, IN_HEAP, rax, Address(rdx, rax, Address::times_4,
!                                                  arrayOopDesc::base_offset_in_bytes(T_INT)),
!                     noreg, noreg);
  }

  void TemplateTable::laload() {
    transition(itos, ltos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
    NOT_LP64(__ mov(rbx, rax)); // rbx,: index
!   __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */,
!                     Address(rdx, rbx, Address::times_8,
!                             arrayOopDesc::base_offset_in_bytes(T_LONG)),
!                     noreg, noreg);
  }

  void TemplateTable::faload() {
    transition(itos, ftos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */,
!                     Address(rdx, rax, Address::times_4,
!                             arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
!                     noreg, noreg);
  }

  void TemplateTable::daload() {
    transition(itos, dtos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */,
!                     Address(rdx, rax, Address::times_8,
!                             arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
!                     noreg, noreg);
  }

  void TemplateTable::aaload() {
    transition(itos, atos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
    do_oop_load(_masm,
                Address(rdx, rax,
                        UseCompressedOops ? Address::times_4 : Address::times_ptr,
                        arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
                rax,
*** 826,846 ****
  void TemplateTable::baload() {
    transition(itos, itos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ resolve_for_read(OOP_NOT_NULL, rdx);
!   __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
  }

  void TemplateTable::caload() {
    transition(itos, itos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ resolve_for_read(OOP_NOT_NULL, rdx);
!   __ load_unsigned_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
  }

  // iload followed by caload frequent pair
  void TemplateTable::fast_icaload() {
    transition(vtos, itos);
--- 828,850 ----
  void TemplateTable::baload() {
    transition(itos, itos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ access_load_at(T_BYTE, IN_HEAP, rax,
!                     Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
!                     noreg, noreg);
  }

  void TemplateTable::caload() {
    transition(itos, itos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ access_load_at(T_CHAR, IN_HEAP, rax,
!                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
!                     noreg, noreg);
  }

  // iload followed by caload frequent pair
  void TemplateTable::fast_icaload() {
    transition(vtos, itos);
*** 849,873 ****
    __ movl(rax, iaddress(rbx));

    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ resolve_for_read(OOP_NOT_NULL, rdx);
!   __ load_unsigned_short(rax,
!                          Address(rdx, rax,
!                                  Address::times_2,
!                                  arrayOopDesc::base_offset_in_bytes(T_CHAR)));
  }

  void TemplateTable::saload() {
    transition(itos, itos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ resolve_for_read(OOP_NOT_NULL, rdx);
!   __ load_signed_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
  }

  void TemplateTable::iload(int n) {
    transition(vtos, itos);
    __ movl(rax, iaddress(n));
--- 853,876 ----
    __ movl(rax, iaddress(rbx));

    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ access_load_at(T_CHAR, IN_HEAP, rax,
!                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
!                     noreg, noreg);
  }

  void TemplateTable::saload() {
    transition(itos, itos);
    // rax: index
    // rdx: array
    index_check(rdx, rax); // kills rbx
!   __ access_load_at(T_SHORT, IN_HEAP, rax,
!                     Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)),
!                     noreg, noreg);
  }

  void TemplateTable::iload(int n) {
    transition(vtos, itos);
    __ movl(rax, iaddress(n));
*** 1055,1105 ****
    __ pop_i(rbx);
    // rax: value
    // rbx: index
    // rdx: array
    index_check(rdx, rbx); // prefer index in rbx
!   __ resolve_for_write(OOP_NOT_NULL, rdx);
!   __ movl(Address(rdx, rbx, Address::times_4,
!                   arrayOopDesc::base_offset_in_bytes(T_INT)),
!           rax);
  }

  void TemplateTable::lastore() {
    transition(ltos, vtos);
    __ pop_i(rbx);
    // rax,: low(value)
    // rcx: array
    // rdx: high(value)
    index_check(rcx, rbx); // prefer index in rbx,
    // rbx,: index
!   __ resolve_for_write(OOP_NOT_NULL, rcx);
!   __ movptr(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax);
!   NOT_LP64(__ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx));
  }

  void TemplateTable::fastore() {
    transition(ftos, vtos);
    __ pop_i(rbx);
    // value is in UseSSE >= 1 ? xmm0 : ST(0)
    // rbx: index
    // rdx: array
    index_check(rdx, rbx); // prefer index in rbx
!   __ resolve_for_write(OOP_NOT_NULL, rdx);
!   __ store_float(Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
  }

  void TemplateTable::dastore() {
    transition(dtos, vtos);
    __ pop_i(rbx);
    // value is in UseSSE >= 2 ? xmm0 : ST(0)
    // rbx: index
    // rdx: array
    index_check(rdx, rbx); // prefer index in rbx
!   __ resolve_for_write(OOP_NOT_NULL, rdx);
!   __ store_double(Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
  }

  void TemplateTable::aastore() {
    Label is_null, ok_is_subtype, done;
    transition(vtos, vtos);
--- 1058,1102 ----
    __ pop_i(rbx);
    // rax: value
    // rbx: index
    // rdx: array
    index_check(rdx, rbx); // prefer index in rbx
!   __ access_store_at(T_INT, IN_HEAP, Address(rdx, rbx, Address::times_4,
!                                              arrayOopDesc::base_offset_in_bytes(T_INT)), rax, noreg, noreg);
  }

  void TemplateTable::lastore() {
    transition(ltos, vtos);
    __ pop_i(rbx);
    // rax,: low(value)
    // rcx: array
    // rdx: high(value)
    index_check(rcx, rbx); // prefer index in rbx,
    // rbx,: index
!   __ access_store_at(T_LONG, IN_HEAP, Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG)), noreg /* ltos */, noreg, noreg);
  }

  void TemplateTable::fastore() {
    transition(ftos, vtos);
    __ pop_i(rbx);
    // value is in UseSSE >= 1 ? xmm0 : ST(0)
    // rbx: index
    // rdx: array
    index_check(rdx, rbx); // prefer index in rbx
!   __ access_store_at(T_FLOAT, IN_HEAP, Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)), noreg /* ftos */, noreg, noreg);
  }

  void TemplateTable::dastore() {
    transition(dtos, vtos);
    __ pop_i(rbx);
    // value is in UseSSE >= 2 ? xmm0 : ST(0)
    // rbx: index
    // rdx: array
    index_check(rdx, rbx); // prefer index in rbx
!   __ access_store_at(T_DOUBLE, IN_HEAP, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)), noreg /* dtos */, noreg, noreg);
  }

  void TemplateTable::aastore() {
    Label is_null, ok_is_subtype, done;
    transition(vtos, vtos);
*** 1111,1148 ****
    Address element_address(rdx, rcx,
                            UseCompressedOops? Address::times_4 : Address::times_ptr,
                            arrayOopDesc::base_offset_in_bytes(T_OBJECT));

    index_check_without_pop(rdx, rcx);     // kills rbx
-   __ resolve_for_write(OOP_NOT_NULL, rdx);
    __ testptr(rax, rax);
    __ jcc(Assembler::zero, is_null);

    // Move subklass into rbx
    __ load_klass(rbx, rax);
    // Move superklass into rax
    __ load_klass(rax, rdx);
    __ movptr(rax, Address(rax, ObjArrayKlass::element_klass_offset()));
-   // Compress array + index*oopSize + 12 into a single register.  Frees rcx.
-   __ lea(rdx, element_address);

    // Generate subtype check.  Blows rcx, rdi
    // Superklass in rax.  Subklass in rbx.
    __ gen_subtype_check(rbx, ok_is_subtype);
- 
    // Come here on failure
    // object is at TOS
    __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));

    // Come here on success
    __ bind(ok_is_subtype);

    // Get the value we will store
    __ movptr(rax, at_tos());
    // Now store using the appropriate barrier
!   do_oop_store(_masm, Address(rdx, 0), rax, IN_HEAP_ARRAY);
    __ jmp(done);

    // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
    __ bind(is_null);
    __ profile_null_seen(rbx);
--- 1108,1142 ----
    Address element_address(rdx, rcx,
                            UseCompressedOops? Address::times_4 : Address::times_ptr,
                            arrayOopDesc::base_offset_in_bytes(T_OBJECT));

    index_check_without_pop(rdx, rcx);     // kills rbx
    __ testptr(rax, rax);
    __ jcc(Assembler::zero, is_null);

    // Move subklass into rbx
    __ load_klass(rbx, rax);
    // Move superklass into rax
    __ load_klass(rax, rdx);
    __ movptr(rax, Address(rax, ObjArrayKlass::element_klass_offset()));

    // Generate subtype check.  Blows rcx, rdi
    // Superklass in rax.  Subklass in rbx.
    __ gen_subtype_check(rbx, ok_is_subtype);
    // Come here on failure
    // object is at TOS
    __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));

    // Come here on success
    __ bind(ok_is_subtype);

    // Get the value we will store
    __ movptr(rax, at_tos());
+   __ movl(rcx, at_tos_p1()); // index
    // Now store using the appropriate barrier
!   do_oop_store(_masm, element_address, rax, IN_HEAP_ARRAY);
    __ jmp(done);

    // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
    __ bind(is_null);
    __ profile_null_seen(rbx);
*** 1160,1198 ****
    __ pop_i(rbx);
    // rax: value
    // rbx: index
    // rdx: array
    index_check(rdx, rbx); // prefer index in rbx
-   __ resolve_for_write(OOP_NOT_NULL, rdx);
    // Need to check whether array is boolean or byte
    // since both types share the bastore bytecode.
    __ load_klass(rcx, rdx);
    __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
    int diffbit = Klass::layout_helper_boolean_diffbit();
    __ testl(rcx, diffbit);
    Label L_skip;
    __ jccb(Assembler::zero, L_skip);
    __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
    __ bind(L_skip);
!   __ movb(Address(rdx, rbx, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
!           rax);
  }

  void TemplateTable::castore() {
    transition(itos, vtos);
    __ pop_i(rbx);
    // rax: value
    // rbx: index
    // rdx: array
    index_check(rdx, rbx);  // prefer index in rbx
!   __ resolve_for_write(OOP_NOT_NULL, rdx);
!   __ movw(Address(rdx, rbx, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
!           rax);
  }

  void TemplateTable::sastore() {
    castore();
--- 1154,1190 ----
    __ pop_i(rbx);
    // rax: value
    // rbx: index
    // rdx: array
    index_check(rdx, rbx); // prefer index in rbx
    // Need to check whether array is boolean or byte
    // since both types share the bastore bytecode.
    __ load_klass(rcx, rdx);
    __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
    int diffbit = Klass::layout_helper_boolean_diffbit();
    __ testl(rcx, diffbit);
    Label L_skip;
    __ jccb(Assembler::zero, L_skip);
    __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
    __ bind(L_skip);
!   __ access_store_at(T_BYTE, IN_HEAP, Address(rdx, rbx, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
!                      rax, noreg, noreg);
  }

  void TemplateTable::castore() {
    transition(itos, vtos);
    __ pop_i(rbx);
    // rax: value
    // rbx: index
    // rdx: array
    index_check(rdx, rbx);  // prefer index in rbx
!   __ access_store_at(T_CHAR, IN_HEAP, Address(rdx, rbx, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
!                      rax, noreg, noreg);
  }

  void TemplateTable::sastore() {
    castore();
*** 2862,2875 ****
    resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
    jvmti_post_field_access(cache, index, is_static, false);
    load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);

    if (!is_static) pop_and_check_object(obj);
-   __ resolve_for_read(OOP_NOT_NULL, obj);

    const Address field(obj, off, Address::times_1, 0*wordSize);
-   NOT_LP64(const Address hi(obj, off, Address::times_1, 1*wordSize));

    Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;

    __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
    // Make sure we don't need to mask edx after the above shift
--- 2854,2865 ----
*** 2877,2887 ****
    __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
    __ jcc(Assembler::notZero, notByte);
    // btos
!   __ load_signed_byte(rax, field);
    __ push(btos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
    }
--- 2867,2877 ----
    __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
    __ jcc(Assembler::notZero, notByte);
    // btos
!   __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
    __ push(btos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
    }
*** 2890,2900 ****
    __ bind(notByte);
    __ cmpl(flags, ztos);
    __ jcc(Assembler::notEqual, notBool);
    // ztos (same code as btos)
!   __ load_signed_byte(rax, field);
    __ push(ztos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      // use btos rewriting, no truncating to t/f bit is needed for getfield.
      patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
--- 2880,2890 ----
    __ bind(notByte);
    __ cmpl(flags, ztos);
    __ jcc(Assembler::notEqual, notBool);
    // ztos (same code as btos)
!   __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
    __ push(ztos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      // use btos rewriting, no truncating to t/f bit is needed for getfield.
      patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
*** 2914,2924 ****
    __ bind(notObj);
    __ cmpl(flags, itos);
    __ jcc(Assembler::notEqual, notInt);
    // itos
!   __ movl(rax, field);
    __ push(itos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
    }
--- 2904,2914 ----
    __ bind(notObj);
    __ cmpl(flags, itos);
    __ jcc(Assembler::notEqual, notInt);
    // itos
!   __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
    __ push(itos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
    }
*** 2926,2936 ****
    __ bind(notInt);
    __ cmpl(flags, ctos);
    __ jcc(Assembler::notEqual, notChar);
    // ctos
!   __ load_unsigned_short(rax, field);
    __ push(ctos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
    }
--- 2916,2926 ----
    __ bind(notInt);
    __ cmpl(flags, ctos);
    __ jcc(Assembler::notEqual, notChar);
    // ctos
!   __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
    __ push(ctos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
    }
*** 2938,2948 ****
    __ bind(notChar);
    __ cmpl(flags, stos);
    __ jcc(Assembler::notEqual, notShort);
    // stos
!   __ load_signed_short(rax, field);
    __ push(stos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
    }
--- 2928,2938 ----
    __ bind(notChar);
    __ cmpl(flags, stos);
    __ jcc(Assembler::notEqual, notShort);
    // stos
!   __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
    __ push(stos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
    }
*** 2950,2982 ****
    __ bind(notShort);
    __ cmpl(flags, ltos);
    __ jcc(Assembler::notEqual, notLong);
    // ltos
! 
! #ifndef _LP64
!   // Generate code as if volatile. There just aren't enough registers to
    // save that information and this code is faster than the test.
!   __ fild_d(field);           // Must load atomically
!   __ subptr(rsp,2*wordSize);  // Make space for store
!   __ fistp_d(Address(rsp,0));
!   __ pop(rax);
!   __ pop(rdx);
! #else
!   __ movq(rax, field);
! #endif
! 
    __ push(ltos);
    // Rewrite bytecode to be faster
    LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
    __ jmp(Done);

    __ bind(notLong);
    __ cmpl(flags, ftos);
    __ jcc(Assembler::notEqual, notFloat);
    // ftos
    __ load_float(field);
    __ push(ftos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
--- 2940,2964 ----
    __ bind(notShort);
    __ cmpl(flags, ltos);
    __ jcc(Assembler::notEqual, notLong);
    // ltos
!   // Generate code as if volatile (x86_32). There just aren't enough registers to
    // save that information and this code is faster than the test.
!   __ access_load_at(T_LONG, IN_HEAP | MO_RELAXED, noreg /* ltos */, field, noreg, noreg);
    __ push(ltos);
+ 
    // Rewrite bytecode to be faster
    LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
    __ jmp(Done);

    __ bind(notLong);
    __ cmpl(flags, ftos);
    __ jcc(Assembler::notEqual, notFloat);
    // ftos
+   __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
    __ load_float(field);
    __ push(ftos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
*** 2987,2997 ****
  #ifdef ASSERT
    __ cmpl(flags, dtos);
    __ jcc(Assembler::notEqual, notDouble);
  #endif
    // dtos
!   __ load_double(field);
    __ push(dtos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
    }
--- 2969,2979 ----
  #ifdef ASSERT
    __ cmpl(flags, dtos);
    __ jcc(Assembler::notEqual, notDouble);
  #endif
    // dtos
!   __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
    __ push(dtos);
    // Rewrite bytecode to be faster
    if (!is_static && rc == may_rewrite) {
      patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
    }
*** 3146,3157 ****
    // btos
    {
      __ pop(btos);
      if (!is_static) pop_and_check_object(obj);
!     __ resolve_for_write(OOP_NOT_NULL, obj);
!     __ movb(field, rax);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
--- 3128,3138 ----
    // btos
    {
      __ pop(btos);
      if (!is_static) pop_and_check_object(obj);
!     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
*** 3162,3174 ****
    // ztos
    {
      __ pop(ztos);
      if (!is_static) pop_and_check_object(obj);
!     __ resolve_for_write(OOP_NOT_NULL, obj);
!     __ andl(rax, 0x1);
!     __ movb(field, rax);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
--- 3143,3153 ----
    // ztos
    {
      __ pop(ztos);
      if (!is_static) pop_and_check_object(obj);
!     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
*** 3179,3189 ****
    // atos
    {
      __ pop(atos);
      if (!is_static) pop_and_check_object(obj);
-     __ resolve_for_write(OOP_NOT_NULL, obj);
      // Store into the field
      do_oop_store(_masm, field, rax);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
      }
--- 3158,3167 ----
*** 3196,3207 ****
    // itos
    {
      __ pop(itos);
      if (!is_static) pop_and_check_object(obj);
!     __ resolve_for_write(OOP_NOT_NULL, obj);
!     __ movl(field, rax);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
--- 3174,3184 ----
    // itos
    {
      __ pop(itos);
      if (!is_static) pop_and_check_object(obj);
!     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
*** 3212,3223 ****
    // ctos
    {
      __ pop(ctos);
      if (!is_static) pop_and_check_object(obj);
!     __ resolve_for_write(OOP_NOT_NULL, obj);
!     __ movw(field, rax);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
--- 3189,3199 ----
    // ctos
    {
      __ pop(ctos);
      if (!is_static) pop_and_check_object(obj);
!     __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
*** 3228,3239 ****
    // stos
    {
      __ pop(stos);
      if (!is_static) pop_and_check_object(obj);
!     __ resolve_for_write(OOP_NOT_NULL, obj);
!     __ movw(field, rax);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
--- 3204,3214 ----
    // stos
    {
      __ pop(stos);
      if (!is_static) pop_and_check_object(obj);
!     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
*** 3245,3256 ****
    // ltos
  #ifdef _LP64
    {
      __ pop(ltos);
      if (!is_static) pop_and_check_object(obj);
!     __ resolve_for_write(OOP_NOT_NULL, obj);
!     __ movq(field, rax);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
--- 3220,3230 ----
    // ltos
  #ifdef _LP64
    {
      __ pop(ltos);
      if (!is_static) pop_and_check_object(obj);
!     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
*** 3262,3288 ****
      __ pop(ltos);  // overwrites rdx, do this after testing volatile.
      if (!is_static) pop_and_check_object(obj);

      // Replace with real volatile test
!     __ push(rdx);
!     __ push(rax);               // Must update atomically with FIST
!     __ fild_d(Address(rsp,0));  // So load into FPU register
!     __ fistp_d(field);          // and put into memory atomically
!     __ addptr(rsp, 2*wordSize);
      // volatile_barrier();
      volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                                   Assembler::StoreStore));
      // Don't rewrite volatile version
      __ jmp(notVolatile);

      __ bind(notVolatileLong);

      __ pop(ltos);  // overwrites rdx
      if (!is_static) pop_and_check_object(obj);
!     __ movptr(hi, rdx);
!     __ movptr(field, rax);
      // Don't rewrite to _fast_lputfield for potential volatile case.
      __ jmp(notVolatile);
    }
  #endif // _LP64
--- 3236,3258 ----
      __ pop(ltos);  // overwrites rdx, do this after testing volatile.
      if (!is_static) pop_and_check_object(obj);

      // Replace with real volatile test
!     __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos */, noreg, noreg);
      // volatile_barrier();
      volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                                   Assembler::StoreStore));
      // Don't rewrite volatile version
      __ jmp(notVolatile);

      __ bind(notVolatileLong);

      __ pop(ltos);  // overwrites rdx
      if (!is_static) pop_and_check_object(obj);
! 
!     __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos */, noreg, noreg);
      // Don't rewrite to _fast_lputfield for potential volatile case.
      __ jmp(notVolatile);
    }
  #endif // _LP64
*** 3292,3303 ****
    // ftos
    {
      __ pop(ftos);
      if (!is_static) pop_and_check_object(obj);
!     __ resolve_for_write(OOP_NOT_NULL, obj);
!     __ store_float(field);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
--- 3262,3272 ----
    // ftos
    {
      __ pop(ftos);
      if (!is_static) pop_and_check_object(obj);
!     __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
      }
      __ jmp(Done);
    }
*** 3310,3321 ****
    // dtos
    {
      __ pop(dtos);
      if (!is_static) pop_and_check_object(obj);
!     __ resolve_for_write(OOP_NOT_NULL, obj);
!     __ store_double(field);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
      }
    }
--- 3279,3289 ----
    // dtos
    {
      __ pop(dtos);
      if (!is_static) pop_and_check_object(obj);
!     __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
      if (!is_static && rc == may_rewrite) {
        patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
      }
    }
*** 3433,3443 ****
    __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
    __ andl(rdx, 0x1);

    // Get object from stack
    pop_and_check_object(rcx);
-   __ resolve_for_write(OOP_NOT_NULL, rcx);

    // field address
    const Address field(rcx, rbx, Address::times_1);

    // access field
--- 3401,3410 ----
*** 3445,3478 ****
    case Bytecodes::_fast_aputfield:
      do_oop_store(_masm, field, rax);
      break;
    case Bytecodes::_fast_lputfield:
  #ifdef _LP64
!     __ movq(field, rax);
  #else
    __ stop("should not be rewritten");
  #endif
      break;
    case Bytecodes::_fast_iputfield:
!     __ movl(field, rax);
      break;
    case Bytecodes::_fast_zputfield:
!     __ andl(rax, 0x1);  // boolean is true if LSB is 1
!     // fall through to bputfield
    case Bytecodes::_fast_bputfield:
!     __ movb(field, rax);
      break;
    case Bytecodes::_fast_sputfield:
!     // fall through
    case Bytecodes::_fast_cputfield:
!     __ movw(field, rax);
      break;
    case Bytecodes::_fast_fputfield:
!     __ store_float(field);
      break;
    case Bytecodes::_fast_dputfield:
!     __ store_double(field);
      break;
    default:
      ShouldNotReachHere();
    }
--- 3412,3446 ----
    case Bytecodes::_fast_aputfield:
      do_oop_store(_masm, field, rax);
      break;
    case Bytecodes::_fast_lputfield:
  #ifdef _LP64
!     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
  #else
    __ stop("should not be rewritten");
  #endif
      break;
    case Bytecodes::_fast_iputfield:
!     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
      break;
    case Bytecodes::_fast_zputfield:
!     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
!     break;
    case Bytecodes::_fast_bputfield:
!     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
      break;
    case Bytecodes::_fast_sputfield:
!     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
!     break;
    case Bytecodes::_fast_cputfield:
!     __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
      break;
    case Bytecodes::_fast_fputfield:
!     __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
      break;
    case Bytecodes::_fast_dputfield:
!     __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
      break;
    default:
      ShouldNotReachHere();
    }
*** 3525,3567 ****
                                             ConstantPoolCacheEntry::f2_offset())));
    // rax: object
    __ verify_oop(rax);
    __ null_check(rax);
-   __ resolve_for_read(OOP_NOT_NULL, rax);
    Address field(rax, rbx, Address::times_1);

    // access field
    switch (bytecode()) {
    case Bytecodes::_fast_agetfield:
      do_oop_load(_masm, field, rax);
      __ verify_oop(rax);
      break;
    case Bytecodes::_fast_lgetfield:
  #ifdef _LP64
!     __ movq(rax, field);
  #else
    __ stop("should not be rewritten");
  #endif
      break;
    case Bytecodes::_fast_igetfield:
!     __ movl(rax, field);
      break;
    case Bytecodes::_fast_bgetfield:
!     __ movsbl(rax, field);
      break;
    case Bytecodes::_fast_sgetfield:
!     __ load_signed_short(rax, field);
      break;
    case Bytecodes::_fast_cgetfield:
!     __ load_unsigned_short(rax, field);
      break;
    case Bytecodes::_fast_fgetfield:
!     __ load_float(field);
      break;
    case Bytecodes::_fast_dgetfield:
!     __ load_double(field);
      break;
    default:
      ShouldNotReachHere();
    }

    // [jk] not needed currently
--- 3493,3534 ----
                                             ConstantPoolCacheEntry::f2_offset())));
    // rax: object
    __ verify_oop(rax);
    __ null_check(rax);
    Address field(rax, rbx, Address::times_1);

    // access field
    switch (bytecode()) {
    case Bytecodes::_fast_agetfield:
      do_oop_load(_masm, field, rax);
      __ verify_oop(rax);
      break;
    case Bytecodes::_fast_lgetfield:
  #ifdef _LP64
!     __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
  #else
    __ stop("should not be rewritten");
  #endif
      break;
    case Bytecodes::_fast_igetfield:
!     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
      break;
    case Bytecodes::_fast_bgetfield:
!     __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
      break;
    case Bytecodes::_fast_sgetfield:
!     __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
      break;
    case Bytecodes::_fast_cgetfield:
!     __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
      break;
    case Bytecodes::_fast_fgetfield:
!     __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
      break;
    case Bytecodes::_fast_dgetfield:
!     __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
      break;
    default:
      ShouldNotReachHere();
    }

    // [jk] not needed currently
*** 3587,3608 ****
                                             ConstantPoolCacheEntry::f2_offset())));
    // make sure exception is reported in correct bcp range (getfield is
    // next instruction)
    __ increment(rbcp);
    __ null_check(rax);
-   __ resolve_for_read(OOP_NOT_NULL, rax);
    const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
    switch (state) {
    case itos:
!     __ movl(rax, field);
      break;
    case atos:
      do_oop_load(_masm, field, rax);
      __ verify_oop(rax);
      break;
    case ftos:
!     __ load_float(field);
      break;
    default:
      ShouldNotReachHere();
    }
--- 3554,3574 ----
                                             ConstantPoolCacheEntry::f2_offset())));
    // make sure exception is reported in correct bcp range (getfield is
    // next instruction)
    __ increment(rbcp);
    __ null_check(rax);
    const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
    switch (state) {
    case itos:
!     __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
      break;
    case atos:
      do_oop_load(_masm, field, rax);
      __ verify_oop(rax);
      break;
    case ftos:
!     __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
      break;
    default:
      ShouldNotReachHere();
    }
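
For orientation, a hedged sketch of what the access_load_at / access_store_at calls in this patch dispatch to. This is modelled on the general shape of the Access API in later mainline JDK sources, not on anything shown in this webrev; the helpers named below (BarrierSet::barrier_set(), barrier_set_assembler(), BarrierSetAssembler::load_at(), AccessInternal::decorator_fixup()) are assumptions used for illustration only.

    // Assumed, simplified sketch -- not the implementation contained in this patch.
    // The MacroAssembler forwards a typed, decorated access to the active GC's
    // BarrierSetAssembler; the default barrier set simply emits the plain load
    // that the template interpreter used to emit directly via movl()/movb()/etc.
    void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators,
                                        Register dst, Address src,
                                        Register tmp1, Register thread_tmp) {
      BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
      decorators = AccessInternal::decorator_fixup(decorators);  // assumed helper
      if ((decorators & AS_RAW) != 0) {
        // Bypass GC-specific barriers and emit the raw access.
        bs->BarrierSetAssembler::load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
      } else {
        // Let the selected collector interpose (e.g. read barriers).
        bs->load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
      }
    }

This is why the interpreter code above passes a BasicType plus decorators such as IN_HEAP (and MO_RELAXED for the x86_32 long cases) instead of choosing a concrete mov instruction itself, and why noreg with a /* ltos */, /* ftos */ or /* dtos */ comment stands in for a value carried in the tos register pair or in xmm0/the FPU stack.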