< prev index next >
src/hotspot/cpu/x86/templateTable_x86.cpp
Print this page
rev 50390 : 8200623: Primitive heap access for interpreter BarrierSetAssembler/x86
*** 768,813 ****
void TemplateTable::iaload() {
transition(itos, itos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ movl(rax, Address(rdx, rax,
! Address::times_4,
! arrayOopDesc::base_offset_in_bytes(T_INT)));
}
void TemplateTable::laload() {
transition(itos, ltos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
NOT_LP64(__ mov(rbx, rax));
// rbx,: index
! __ movptr(rax, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize));
! NOT_LP64(__ movl(rdx, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)));
}
void TemplateTable::faload() {
transition(itos, ftos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ load_float(Address(rdx, rax,
Address::times_4,
! arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
}
void TemplateTable::daload() {
transition(itos, dtos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ load_double(Address(rdx, rax,
Address::times_8,
! arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
}
void TemplateTable::aaload() {
transition(itos, atos);
// rax: index
--- 768,820 ----
void TemplateTable::iaload() {
transition(itos, itos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ access_load_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, rax,
! Address(rdx, rax, Address::times_4,
! arrayOopDesc::base_offset_in_bytes(T_INT)),
! noreg, noreg);
}
void TemplateTable::laload() {
transition(itos, ltos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
NOT_LP64(__ mov(rbx, rax));
// rbx,: index
! __ access_load_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, noreg /* ltos */,
! Address(rdx, rbx, Address::times_8,
! arrayOopDesc::base_offset_in_bytes(T_LONG)),
! noreg, noreg);
}
void TemplateTable::faload() {
transition(itos, ftos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ access_load_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, noreg /* ftos */,
! Address(rdx, rax,
Address::times_4,
! arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
! noreg, noreg);
}
void TemplateTable::daload() {
transition(itos, dtos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ access_load_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, noreg /* dtos */,
! Address(rdx, rax,
Address::times_8,
! arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
! noreg, noreg);
}
void TemplateTable::aaload() {
transition(itos, atos);
// rax: index
*** 824,842 ****
void TemplateTable::baload() {
transition(itos, itos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
}
void TemplateTable::caload() {
transition(itos, itos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ load_unsigned_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
}
// iload followed by caload frequent pair
void TemplateTable::fast_icaload() {
transition(vtos, itos);
--- 831,853 ----
void TemplateTable::baload() {
transition(itos, itos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ access_load_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, rax,
! Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
! noreg, noreg);
}
void TemplateTable::caload() {
transition(itos, itos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, rax,
! Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
! noreg, noreg);
}
// iload followed by caload frequent pair
void TemplateTable::fast_icaload() {
transition(vtos, itos);
*** 845,867 ****
__ movl(rax, iaddress(rbx));
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ load_unsigned_short(rax,
! Address(rdx, rax,
! Address::times_2,
! arrayOopDesc::base_offset_in_bytes(T_CHAR)));
}
void TemplateTable::saload() {
transition(itos, itos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ load_signed_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
}
void TemplateTable::iload(int n) {
transition(vtos, itos);
__ movl(rax, iaddress(n));
--- 856,879 ----
__ movl(rax, iaddress(rbx));
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, rax,
! Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
! noreg, noreg);
}
void TemplateTable::saload() {
transition(itos, itos);
// rax: index
// rdx: array
index_check(rdx, rax); // kills rbx
! __ access_load_at(T_SHORT, IN_HEAP | IN_HEAP_ARRAY, rax,
! Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)),
! noreg, noreg);
}
void TemplateTable::iload(int n) {
transition(vtos, itos);
__ movl(rax, iaddress(n));
*** 1049,1095 ****
__ pop_i(rbx);
// rax: value
// rbx: index
// rdx: array
index_check(rdx, rbx); // prefer index in rbx
! __ movl(Address(rdx, rbx,
! Address::times_4,
arrayOopDesc::base_offset_in_bytes(T_INT)),
! rax);
}
void TemplateTable::lastore() {
transition(ltos, vtos);
__ pop_i(rbx);
// rax,: low(value)
// rcx: array
// rdx: high(value)
index_check(rcx, rbx); // prefer index in rbx,
// rbx,: index
! __ movptr(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax);
! NOT_LP64(__ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx));
}
void TemplateTable::fastore() {
transition(ftos, vtos);
__ pop_i(rbx);
// value is in UseSSE >= 1 ? xmm0 : ST(0)
// rbx: index
// rdx: array
index_check(rdx, rbx); // prefer index in rbx
! __ store_float(Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
}
void TemplateTable::dastore() {
transition(dtos, vtos);
__ pop_i(rbx);
// value is in UseSSE >= 2 ? xmm0 : ST(0)
// rbx: index
// rdx: array
index_check(rdx, rbx); // prefer index in rbx
! __ store_double(Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
}
void TemplateTable::aastore() {
Label is_null, ok_is_subtype, done;
transition(vtos, vtos);
--- 1061,1115 ----
__ pop_i(rbx);
// rax: value
// rbx: index
// rdx: array
index_check(rdx, rbx); // prefer index in rbx
! __ access_store_at(T_INT, IN_HEAP | IN_HEAP_ARRAY,
! Address(rdx, rbx, Address::times_4,
arrayOopDesc::base_offset_in_bytes(T_INT)),
! rax, noreg, noreg);
}
void TemplateTable::lastore() {
transition(ltos, vtos);
__ pop_i(rbx);
// rax,: low(value)
// rcx: array
// rdx: high(value)
index_check(rcx, rbx); // prefer index in rbx,
// rbx,: index
! __ access_store_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY,
! Address(rcx, rbx, Address::times_8,
! arrayOopDesc::base_offset_in_bytes(T_LONG)),
! noreg /* ltos */, noreg, noreg);
}
void TemplateTable::fastore() {
transition(ftos, vtos);
__ pop_i(rbx);
// value is in UseSSE >= 1 ? xmm0 : ST(0)
// rbx: index
// rdx: array
index_check(rdx, rbx); // prefer index in rbx
! __ access_store_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY,
! Address(rdx, rbx, Address::times_4,
! arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
! noreg /* ftos */, noreg, noreg);
}
void TemplateTable::dastore() {
transition(dtos, vtos);
__ pop_i(rbx);
// value is in UseSSE >= 2 ? xmm0 : ST(0)
// rbx: index
// rdx: array
index_check(rdx, rbx); // prefer index in rbx
! __ access_store_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY,
! Address(rdx, rbx, Address::times_8,
! arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
! noreg /* dtos */, noreg, noreg);
}
void TemplateTable::aastore() {
Label is_null, ok_is_subtype, done;
transition(vtos, vtos);
*** 1158,1184 ****
__ testl(rcx, diffbit);
Label L_skip;
__ jccb(Assembler::zero, L_skip);
__ andl(rax, 1); // if it is a T_BOOLEAN array, mask the stored value to 0/1
__ bind(L_skip);
! __ movb(Address(rdx, rbx,
! Address::times_1,
arrayOopDesc::base_offset_in_bytes(T_BYTE)),
! rax);
}
void TemplateTable::castore() {
transition(itos, vtos);
__ pop_i(rbx);
// rax: value
// rbx: index
// rdx: array
index_check(rdx, rbx); // prefer index in rbx
! __ movw(Address(rdx, rbx,
! Address::times_2,
arrayOopDesc::base_offset_in_bytes(T_CHAR)),
! rax);
}
void TemplateTable::sastore() {
castore();
--- 1178,1204 ----
__ testl(rcx, diffbit);
Label L_skip;
__ jccb(Assembler::zero, L_skip);
__ andl(rax, 1); // if it is a T_BOOLEAN array, mask the stored value to 0/1
__ bind(L_skip);
! __ access_store_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY,
! Address(rdx, rbx, Address::times_1,
arrayOopDesc::base_offset_in_bytes(T_BYTE)),
! rax, noreg, noreg);
}
void TemplateTable::castore() {
transition(itos, vtos);
__ pop_i(rbx);
// rax: value
// rbx: index
// rdx: array
index_check(rdx, rbx); // prefer index in rbx
! __ access_store_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY,
! Address(rdx, rbx, Address::times_2,
arrayOopDesc::base_offset_in_bytes(T_CHAR)),
! rax, noreg, noreg);
}
void TemplateTable::sastore() {
castore();
*** 2850,2860 ****
load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
if (!is_static) pop_and_check_object(obj);
const Address field(obj, off, Address::times_1, 0*wordSize);
- NOT_LP64(const Address hi(obj, off, Address::times_1, 1*wordSize));
Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
__ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
// Make sure we don't need to mask edx after the above shift
--- 2870,2879 ----
*** 2862,2872 ****
__ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
__ jcc(Assembler::notZero, notByte);
// btos
! __ load_signed_byte(rax, field);
__ push(btos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
}
--- 2881,2891 ----
__ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
__ jcc(Assembler::notZero, notByte);
// btos
! __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
__ push(btos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
}
*** 2875,2885 ****
__ bind(notByte);
__ cmpl(flags, ztos);
__ jcc(Assembler::notEqual, notBool);
// ztos (same code as btos)
! __ load_signed_byte(rax, field);
__ push(ztos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
// use btos rewriting, no truncating to t/f bit is needed for getfield.
patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
--- 2894,2904 ----
__ bind(notByte);
__ cmpl(flags, ztos);
__ jcc(Assembler::notEqual, notBool);
// ztos (same code as btos)
! __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
__ push(ztos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
// use btos rewriting, no truncating to t/f bit is needed for getfield.
patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
*** 2899,2909 ****
__ bind(notObj);
__ cmpl(flags, itos);
__ jcc(Assembler::notEqual, notInt);
// itos
! __ movl(rax, field);
__ push(itos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
}
--- 2918,2928 ----
__ bind(notObj);
__ cmpl(flags, itos);
__ jcc(Assembler::notEqual, notInt);
// itos
! __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
__ push(itos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
}
*** 2911,2921 ****
__ bind(notInt);
__ cmpl(flags, ctos);
__ jcc(Assembler::notEqual, notChar);
// ctos
! __ load_unsigned_short(rax, field);
__ push(ctos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
}
--- 2930,2940 ----
__ bind(notInt);
__ cmpl(flags, ctos);
__ jcc(Assembler::notEqual, notChar);
// ctos
! __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
__ push(ctos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
}
*** 2923,2933 ****
__ bind(notChar);
__ cmpl(flags, stos);
__ jcc(Assembler::notEqual, notShort);
// stos
! __ load_signed_short(rax, field);
__ push(stos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
}
--- 2942,2952 ----
__ bind(notChar);
__ cmpl(flags, stos);
__ jcc(Assembler::notEqual, notShort);
// stos
! __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
__ push(stos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
}
*** 2935,2968 ****
__ bind(notShort);
__ cmpl(flags, ltos);
__ jcc(Assembler::notEqual, notLong);
// ltos
!
! #ifndef _LP64
! // Generate code as if volatile. There just aren't enough registers to
// save that information and this code is faster than the test.
! __ fild_d(field); // Must load atomically
! __ subptr(rsp,2*wordSize); // Make space for store
! __ fistp_d(Address(rsp,0));
! __ pop(rax);
! __ pop(rdx);
! #else
! __ movq(rax, field);
! #endif
!
__ push(ltos);
// Rewrite bytecode to be faster
LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
__ jmp(Done);
__ bind(notLong);
__ cmpl(flags, ftos);
__ jcc(Assembler::notEqual, notFloat);
// ftos
! __ load_float(field);
__ push(ftos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
}
--- 2954,2977 ----
__ bind(notShort);
__ cmpl(flags, ltos);
__ jcc(Assembler::notEqual, notLong);
// ltos
! // Generate code as if volatile (x86_32). There just aren't enough registers to
// save that information and this code is faster than the test.
! __ access_load_at(T_LONG, IN_HEAP | MO_RELAXED, noreg /* ltos */, field, noreg, noreg);
__ push(ltos);
// Rewrite bytecode to be faster
LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
__ jmp(Done);
__ bind(notLong);
__ cmpl(flags, ftos);
__ jcc(Assembler::notEqual, notFloat);
// ftos
! __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
__ push(ftos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
}
*** 2972,2982 ****
#ifdef ASSERT
__ cmpl(flags, dtos);
__ jcc(Assembler::notEqual, notDouble);
#endif
// dtos
! __ load_double(field);
__ push(dtos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
}
--- 2981,2991 ----
#ifdef ASSERT
__ cmpl(flags, dtos);
__ jcc(Assembler::notEqual, notDouble);
#endif
// dtos
! __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
__ push(dtos);
// Rewrite bytecode to be faster
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
}
*** 3131,3141 ****
// btos
{
__ pop(btos);
if (!is_static) pop_and_check_object(obj);
! __ movb(field, rax);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
--- 3140,3150 ----
// btos
{
__ pop(btos);
if (!is_static) pop_and_check_object(obj);
! __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
*** 3146,3157 ****
// ztos
{
__ pop(ztos);
if (!is_static) pop_and_check_object(obj);
! __ andl(rax, 0x1);
! __ movb(field, rax);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
--- 3155,3165 ----
// ztos
{
__ pop(ztos);
if (!is_static) pop_and_check_object(obj);
! __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
*** 3178,3188 ****
// itos
{
__ pop(itos);
if (!is_static) pop_and_check_object(obj);
! __ movl(field, rax);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
--- 3186,3196 ----
// itos
{
__ pop(itos);
if (!is_static) pop_and_check_object(obj);
! __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
*** 3193,3203 ****
// ctos
{
__ pop(ctos);
if (!is_static) pop_and_check_object(obj);
! __ movw(field, rax);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
--- 3201,3211 ----
// ctos
{
__ pop(ctos);
if (!is_static) pop_and_check_object(obj);
! __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
*** 3208,3218 ****
// stos
{
__ pop(stos);
if (!is_static) pop_and_check_object(obj);
! __ movw(field, rax);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
--- 3216,3226 ----
// stos
{
__ pop(stos);
if (!is_static) pop_and_check_object(obj);
! __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
*** 3224,3234 ****
// ltos
#ifdef _LP64
{
__ pop(ltos);
if (!is_static) pop_and_check_object(obj);
! __ movq(field, rax);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
--- 3232,3242 ----
// ltos
#ifdef _LP64
{
__ pop(ltos);
if (!is_static) pop_and_check_object(obj);
! __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
*** 3240,3266 ****
__ pop(ltos); // overwrites rdx, do this after testing volatile.
if (!is_static) pop_and_check_object(obj);
// Replace with real volatile test
! __ push(rdx);
! __ push(rax); // Must update atomically with FIST
! __ fild_d(Address(rsp,0)); // So load into FPU register
! __ fistp_d(field); // and put into memory atomically
! __ addptr(rsp, 2*wordSize);
// volatile_barrier();
volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
Assembler::StoreStore));
// Don't rewrite volatile version
__ jmp(notVolatile);
__ bind(notVolatileLong);
__ pop(ltos); // overwrites rdx
if (!is_static) pop_and_check_object(obj);
! __ movptr(hi, rdx);
! __ movptr(field, rax);
// Don't rewrite to _fast_lputfield for potential volatile case.
__ jmp(notVolatile);
}
#endif // _LP64
--- 3248,3269 ----
__ pop(ltos); // overwrites rdx, do this after testing volatile.
if (!is_static) pop_and_check_object(obj);
// Replace with real volatile test
! __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos */, noreg, noreg);
// volatile_barrier();
volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
Assembler::StoreStore));
// Don't rewrite volatile version
__ jmp(notVolatile);
__ bind(notVolatileLong);
__ pop(ltos); // overwrites rdx
if (!is_static) pop_and_check_object(obj);
! __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
// Don't rewrite to _fast_lputfield for potential volatile case.
__ jmp(notVolatile);
}
#endif // _LP64
*** 3270,3280 ****
// ftos
{
__ pop(ftos);
if (!is_static) pop_and_check_object(obj);
! __ store_float(field);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
--- 3273,3283 ----
// ftos
{
__ pop(ftos);
if (!is_static) pop_and_check_object(obj);
! __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
}
__ jmp(Done);
}
*** 3287,3297 ****
// dtos
{
__ pop(dtos);
if (!is_static) pop_and_check_object(obj);
! __ store_double(field);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
}
}
--- 3290,3300 ----
// dtos
{
__ pop(dtos);
if (!is_static) pop_and_check_object(obj);
! __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
if (!is_static && rc == may_rewrite) {
patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
}
}
*** 3420,3453 ****
case Bytecodes::_fast_aputfield:
do_oop_store(_masm, field, rax);
break;
case Bytecodes::_fast_lputfield:
#ifdef _LP64
! __ movq(field, rax);
#else
__ stop("should not be rewritten");
#endif
break;
case Bytecodes::_fast_iputfield:
! __ movl(field, rax);
break;
case Bytecodes::_fast_zputfield:
! __ andl(rax, 0x1); // boolean is true if LSB is 1
! // fall through to bputfield
case Bytecodes::_fast_bputfield:
! __ movb(field, rax);
break;
case Bytecodes::_fast_sputfield:
! // fall through
case Bytecodes::_fast_cputfield:
! __ movw(field, rax);
break;
case Bytecodes::_fast_fputfield:
! __ store_float(field);
break;
case Bytecodes::_fast_dputfield:
! __ store_double(field);
break;
default:
ShouldNotReachHere();
}
--- 3423,3457 ----
case Bytecodes::_fast_aputfield:
do_oop_store(_masm, field, rax);
break;
case Bytecodes::_fast_lputfield:
#ifdef _LP64
! __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
#else
__ stop("should not be rewritten");
#endif
break;
case Bytecodes::_fast_iputfield:
! __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
break;
case Bytecodes::_fast_zputfield:
! __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
! break;
case Bytecodes::_fast_bputfield:
! __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
break;
case Bytecodes::_fast_sputfield:
! __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
! break;
case Bytecodes::_fast_cputfield:
! __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
break;
case Bytecodes::_fast_fputfield:
! __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
break;
case Bytecodes::_fast_dputfield:
! __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
break;
default:
ShouldNotReachHere();
}
*** 3510,3541 ****
do_oop_load(_masm, field, rax);
__ verify_oop(rax);
break;
case Bytecodes::_fast_lgetfield:
#ifdef _LP64
! __ movq(rax, field);
#else
__ stop("should not be rewritten");
#endif
break;
case Bytecodes::_fast_igetfield:
! __ movl(rax, field);
break;
case Bytecodes::_fast_bgetfield:
! __ movsbl(rax, field);
break;
case Bytecodes::_fast_sgetfield:
! __ load_signed_short(rax, field);
break;
case Bytecodes::_fast_cgetfield:
! __ load_unsigned_short(rax, field);
break;
case Bytecodes::_fast_fgetfield:
! __ load_float(field);
break;
case Bytecodes::_fast_dgetfield:
! __ load_double(field);
break;
default:
ShouldNotReachHere();
}
// [jk] not needed currently
--- 3514,3545 ----
do_oop_load(_masm, field, rax);
__ verify_oop(rax);
break;
case Bytecodes::_fast_lgetfield:
#ifdef _LP64
! __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
#else
__ stop("should not be rewritten");
#endif
break;
case Bytecodes::_fast_igetfield:
! __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
break;
case Bytecodes::_fast_bgetfield:
! __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
break;
case Bytecodes::_fast_sgetfield:
! __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
break;
case Bytecodes::_fast_cgetfield:
! __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
break;
case Bytecodes::_fast_fgetfield:
! __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
break;
case Bytecodes::_fast_dgetfield:
! __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
break;
default:
ShouldNotReachHere();
}
// [jk] not needed currently
*** 3564,3581 ****
__ increment(rbcp);
__ null_check(rax);
const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
switch (state) {
case itos:
! __ movl(rax, field);
break;
case atos:
do_oop_load(_masm, field, rax);
__ verify_oop(rax);
break;
case ftos:
! __ load_float(field);
break;
default:
ShouldNotReachHere();
}
--- 3568,3585 ----
__ increment(rbcp);
__ null_check(rax);
const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
switch (state) {
case itos:
! __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
break;
case atos:
do_oop_load(_masm, field, rax);
__ verify_oop(rax);
break;
case ftos:
! __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
break;
default:
ShouldNotReachHere();
}
< prev index next >