src/hotspot/cpu/x86/templateTable_x86.cpp

rev 50081 : [mq]: primitives2.patch

@@ -765,58 +765,60 @@
 void TemplateTable::iaload() {
   transition(itos, itos);
   // rax: index
   // rdx: array
   index_check(rdx, rax); // kills rbx
-  __ resolve_for_read(OOP_NOT_NULL, rdx);
-  __ movl(rax, Address(rdx, rax,
+  __ access_load_at(T_INT, IN_HEAP, rax, Address(rdx, rax,
                        Address::times_4,
-                       arrayOopDesc::base_offset_in_bytes(T_INT)));
+                                                 arrayOopDesc::base_offset_in_bytes(T_INT)),
+                    noreg, noreg);
 }
 
 void TemplateTable::laload() {
   transition(itos, ltos);
   // rax: index
   // rdx: array
   index_check(rdx, rax); // kills rbx
   NOT_LP64(__ mov(rbx, rax));
   // rbx,: index
-  __ resolve_for_read(OOP_NOT_NULL, rdx);
-  __ movptr(rax, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize));
-  NOT_LP64(__ movl(rdx, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)));
+  __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */,
+                    Address(rdx, rbx, Address::times_8,
+                            arrayOopDesc::base_offset_in_bytes(T_LONG)),
+                    noreg, noreg);
 }
 
 
 
 void TemplateTable::faload() {
   transition(itos, ftos);
   // rax: index
   // rdx: array
   index_check(rdx, rax); // kills rbx
-  __ resolve_for_read(OOP_NOT_NULL, rdx);
-  __ load_float(Address(rdx, rax,
+  __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */,
+                    Address(rdx, rax,
                         Address::times_4,
-                        arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
+                            arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
+                    noreg, noreg);
 }
 
 void TemplateTable::daload() {
   transition(itos, dtos);
   // rax: index
   // rdx: array
   index_check(rdx, rax); // kills rbx
-  __ resolve_for_read(OOP_NOT_NULL, rdx);
-  __ load_double(Address(rdx, rax,
+  __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */,
+                    Address(rdx, rax,
                          Address::times_8,
-                         arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
+                            arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
+                    noreg, noreg);
 }
 
 void TemplateTable::aaload() {
   transition(itos, atos);
   // rax: index
   // rdx: array
   index_check(rdx, rax); // kills rbx
-  __ resolve_for_read(OOP_NOT_NULL, rdx);
   do_oop_load(_masm,
               Address(rdx, rax,
                       UseCompressedOops ? Address::times_4 : Address::times_ptr,
                       arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
               rax,

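For context, the access_load_at calls introduced above route each primitive array load through the active GC's BarrierSetAssembler instead of emitting a raw movl/movptr/load_float. A minimal sketch of the assumed dispatch helper (the real definition lives in macroAssembler_x86.cpp and its decorator handling may differ):

    // Sketch only: assumed shape of the MacroAssembler helper used by the templates above.
    void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators,
                                        Register dst, Address src,
                                        Register tmp1, Register thread_tmp) {
      BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
      decorators = AccessInternal::decorator_fixup(decorators);
      if ((decorators & AS_RAW) != 0) {
        // Raw access: emit the plain load and bypass GC-specific decoration.
        bs->BarrierSetAssembler::load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
      } else {
        // Let the collector's assembler wrap the load with whatever barriers it needs.
        bs->load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
      }
    }

access_store_at is assumed to be the symmetric store-side helper. Passing noreg for the value register means the operand travels in the interpreter's tos cache (rax, rax:rdx, or xmm0/ST(0)) rather than in a named general-purpose register.
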
@@ -826,21 +828,23 @@
 void TemplateTable::baload() {
   transition(itos, itos);
   // rax: index
   // rdx: array
   index_check(rdx, rax); // kills rbx
-  __ resolve_for_read(OOP_NOT_NULL, rdx);
-  __ load_signed_byte(rax, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
+  __ access_load_at(T_BYTE, IN_HEAP, rax,
+                    Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
+                    noreg, noreg);
 }
 
 void TemplateTable::caload() {
   transition(itos, itos);
   // rax: index
   // rdx: array
   index_check(rdx, rax); // kills rbx
-  __ resolve_for_read(OOP_NOT_NULL, rdx);
-  __ load_unsigned_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
+  __ access_load_at(T_CHAR, IN_HEAP, rax,
+                    Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
+                    noreg, noreg);
 }
 
 // iload followed by caload frequent pair
 void TemplateTable::fast_icaload() {
   transition(vtos, itos);

@@ -849,25 +853,24 @@
   __ movl(rax, iaddress(rbx));
 
   // rax: index
   // rdx: array
   index_check(rdx, rax); // kills rbx
-  __ resolve_for_read(OOP_NOT_NULL, rdx);
-  __ load_unsigned_short(rax,
-                         Address(rdx, rax,
-                                 Address::times_2,
-                                 arrayOopDesc::base_offset_in_bytes(T_CHAR)));
+  __ access_load_at(T_CHAR, IN_HEAP, rax,
+                    Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
+                    noreg, noreg);
 }
 
 
 void TemplateTable::saload() {
   transition(itos, itos);
   // rax: index
   // rdx: array
   index_check(rdx, rax); // kills rbx
-  __ resolve_for_read(OOP_NOT_NULL, rdx);
-  __ load_signed_short(rax, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
+  __ access_load_at(T_SHORT, IN_HEAP, rax,
+                    Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)),
+                    noreg, noreg);
 }
 
 void TemplateTable::iload(int n) {
   transition(vtos, itos);
   __ movl(rax, iaddress(n));

@@ -1055,51 +1058,45 @@
   __ pop_i(rbx);
   // rax: value
   // rbx: index
   // rdx: array
   index_check(rdx, rbx); // prefer index in rbx
-  __ resolve_for_write(OOP_NOT_NULL, rdx);
-  __ movl(Address(rdx, rbx,
+  __ access_store_at(T_INT, IN_HEAP, Address(rdx, rbx,
                   Address::times_4,
-                  arrayOopDesc::base_offset_in_bytes(T_INT)),
-          rax);
+                                             arrayOopDesc::base_offset_in_bytes(T_INT)), rax, noreg, noreg);
 }
 
 void TemplateTable::lastore() {
   transition(ltos, vtos);
   __ pop_i(rbx);
   // rax,: low(value)
   // rcx: array
   // rdx: high(value)
   index_check(rcx, rbx);  // prefer index in rbx,
   // rbx,: index
-  __ resolve_for_write(OOP_NOT_NULL, rcx);
-  __ movptr(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax);
-  NOT_LP64(__ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx));
+  __ access_store_at(T_LONG, IN_HEAP, Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG)), noreg /* ltos */, noreg, noreg);
 }
 
 
 void TemplateTable::fastore() {
   transition(ftos, vtos);
   __ pop_i(rbx);
   // value is in UseSSE >= 1 ? xmm0 : ST(0)
   // rbx:  index
   // rdx:  array
   index_check(rdx, rbx); // prefer index in rbx
-  __ resolve_for_write(OOP_NOT_NULL, rdx);
-  __ store_float(Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
+  __ access_store_at(T_FLOAT, IN_HEAP, Address(rdx, rbx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_FLOAT)), noreg /* ftos */, noreg, noreg);
 }
 
 void TemplateTable::dastore() {
   transition(dtos, vtos);
   __ pop_i(rbx);
   // value is in UseSSE >= 2 ? xmm0 : ST(0)
   // rbx:  index
   // rdx:  array
   index_check(rdx, rbx); // prefer index in rbx
-  __ resolve_for_write(OOP_NOT_NULL, rdx);
-  __ store_double(Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
+  __ access_store_at(T_DOUBLE, IN_HEAP, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)), noreg /* dtos */, noreg, noreg);
 }
 
 void TemplateTable::aastore() {
   Label is_null, ok_is_subtype, done;
   transition(vtos, vtos);

@@ -1111,38 +1108,35 @@
   Address element_address(rdx, rcx,
                           UseCompressedOops? Address::times_4 : Address::times_ptr,
                           arrayOopDesc::base_offset_in_bytes(T_OBJECT));
 
   index_check_without_pop(rdx, rcx);     // kills rbx
-  __ resolve_for_write(OOP_NOT_NULL, rdx);
   __ testptr(rax, rax);
   __ jcc(Assembler::zero, is_null);
 
   // Move subklass into rbx
   __ load_klass(rbx, rax);
   // Move superklass into rax
   __ load_klass(rax, rdx);
   __ movptr(rax, Address(rax,
                          ObjArrayKlass::element_klass_offset()));
-  // Compress array + index*oopSize + 12 into a single register.  Frees rcx.
-  __ lea(rdx, element_address);
 
   // Generate subtype check.  Blows rcx, rdi
   // Superklass in rax.  Subklass in rbx.
   __ gen_subtype_check(rbx, ok_is_subtype);
-
   // Come here on failure
   // object is at TOS
   __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
 
   // Come here on success
   __ bind(ok_is_subtype);
 
   // Get the value we will store
   __ movptr(rax, at_tos());
+  __ movl(rcx, at_tos_p1()); // index
   // Now store using the appropriate barrier
-  do_oop_store(_masm, Address(rdx, 0), rax, IN_HEAP_ARRAY);
+  do_oop_store(_masm, element_address, rax, IN_HEAP_ARRAY);
   __ jmp(done);
 
   // Have a NULL in rax, rdx=array, ecx=index.  Store NULL at ary[idx]
   __ bind(is_null);
   __ profile_null_seen(rbx);

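The aaload/aastore paths keep using do_oop_load/do_oop_store rather than access_load_at/access_store_at because object references also need compressed-oop handling and GC read/write barriers. With the lea-based address compression removed, aastore reloads the index into rcx (the new movl(rcx, at_tos_p1()) above) so that element_address is valid again after gen_subtype_check has clobbered rcx. A sketch of the file-local helpers, on the assumption that they simply forward to load_heap_oop/store_heap_oop with rdx and rbx as scratch registers:

    // Assumed form of the static helpers defined near the top of templateTable_x86.cpp;
    // __ expands to _masm-> per the file's convention.
    static void do_oop_store(InterpreterMacroAssembler* _masm,
                             Address dst,
                             Register val,
                             DecoratorSet decorators = 0) {
      assert(val == noreg || val == rax, "parameter is just for looks");
      __ store_heap_oop(dst, val, rdx, rbx, decorators);   // emits the GC write barrier
    }

    static void do_oop_load(InterpreterMacroAssembler* _masm,
                            Address src,
                            Register dst,
                            DecoratorSet decorators = 0) {
      __ load_heap_oop(dst, src, rdx, rbx, decorators);    // decompresses and applies any load barrier
    }
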
@@ -1160,39 +1154,37 @@
   __ pop_i(rbx);
   // rax: value
   // rbx: index
   // rdx: array
   index_check(rdx, rbx); // prefer index in rbx
-  __ resolve_for_write(OOP_NOT_NULL, rdx);
   // Need to check whether array is boolean or byte
   // since both types share the bastore bytecode.
   __ load_klass(rcx, rdx);
   __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
   int diffbit = Klass::layout_helper_boolean_diffbit();
   __ testl(rcx, diffbit);
   Label L_skip;
   __ jccb(Assembler::zero, L_skip);
   __ andl(rax, 1);  // if it is a T_BOOLEAN array, mask the stored value to 0/1
   __ bind(L_skip);
-  __ movb(Address(rdx, rbx,
+  __ access_store_at(T_BYTE, IN_HEAP, Address(rdx, rbx,
                   Address::times_1,
                   arrayOopDesc::base_offset_in_bytes(T_BYTE)),
-          rax);
+                     rax, noreg, noreg);
 }
 
 void TemplateTable::castore() {
   transition(itos, vtos);
   __ pop_i(rbx);
   // rax: value
   // rbx: index
   // rdx: array
   index_check(rdx, rbx);  // prefer index in rbx
-  __ resolve_for_write(OOP_NOT_NULL, rdx);
-  __ movw(Address(rdx, rbx,
+  __ access_store_at(T_CHAR, IN_HEAP, Address(rdx, rbx,
                   Address::times_2,
                   arrayOopDesc::base_offset_in_bytes(T_CHAR)),
-          rax);
+                     rax, noreg, noreg);
 }
 
 
 void TemplateTable::sastore() {
   castore();

@@ -2862,14 +2854,12 @@
   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
   jvmti_post_field_access(cache, index, is_static, false);
   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
 
   if (!is_static) pop_and_check_object(obj);
-  __ resolve_for_read(OOP_NOT_NULL, obj);
 
   const Address field(obj, off, Address::times_1, 0*wordSize);
-  NOT_LP64(const Address hi(obj, off, Address::times_1, 1*wordSize));
 
   Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
 
   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
   // Make sure we don't need to mask edx after the above shift

@@ -2877,11 +2867,11 @@
 
   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
 
   __ jcc(Assembler::notZero, notByte);
   // btos
-  __ load_signed_byte(rax, field);
+  __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
   __ push(btos);
   // Rewrite bytecode to be faster
   if (!is_static && rc == may_rewrite) {
     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
   }

@@ -2890,11 +2880,11 @@
   __ bind(notByte);
   __ cmpl(flags, ztos);
   __ jcc(Assembler::notEqual, notBool);
 
   // ztos (same code as btos)
-  __ load_signed_byte(rax, field);
+  __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
   __ push(ztos);
   // Rewrite bytecode to be faster
   if (!is_static && rc == may_rewrite) {
     // use btos rewriting, no truncating to t/f bit is needed for getfield.
     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);

@@ -2914,11 +2904,11 @@
 
   __ bind(notObj);
   __ cmpl(flags, itos);
   __ jcc(Assembler::notEqual, notInt);
   // itos
-  __ movl(rax, field);
+  __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
   __ push(itos);
   // Rewrite bytecode to be faster
   if (!is_static && rc == may_rewrite) {
     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
   }

@@ -2926,11 +2916,11 @@
 
   __ bind(notInt);
   __ cmpl(flags, ctos);
   __ jcc(Assembler::notEqual, notChar);
   // ctos
-  __ load_unsigned_short(rax, field);
+  __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
   __ push(ctos);
   // Rewrite bytecode to be faster
   if (!is_static && rc == may_rewrite) {
     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
   }

@@ -2938,11 +2928,11 @@
 
   __ bind(notChar);
   __ cmpl(flags, stos);
   __ jcc(Assembler::notEqual, notShort);
   // stos
-  __ load_signed_short(rax, field);
+  __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
   __ push(stos);
   // Rewrite bytecode to be faster
   if (!is_static && rc == may_rewrite) {
     patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
   }

@@ -2950,33 +2940,25 @@
 
   __ bind(notShort);
   __ cmpl(flags, ltos);
   __ jcc(Assembler::notEqual, notLong);
   // ltos
-
-#ifndef _LP64
-  // Generate code as if volatile.  There just aren't enough registers to
+  // Generate code as if volatile (x86_32).  There just aren't enough registers to
   // save that information and this code is faster than the test.
-  __ fild_d(field);                // Must load atomically
-  __ subptr(rsp,2*wordSize);    // Make space for store
-  __ fistp_d(Address(rsp,0));
-  __ pop(rax);
-  __ pop(rdx);
-#else
-  __ movq(rax, field);
-#endif
-
+  __ access_load_at(T_LONG, IN_HEAP | MO_RELAXED, noreg /* ltos */, field, noreg, noreg);
   __ push(ltos);
+
   // Rewrite bytecode to be faster
   LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
   __ jmp(Done);
 
   __ bind(notLong);
   __ cmpl(flags, ftos);
   __ jcc(Assembler::notEqual, notFloat);
   // ftos
 
+  __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
-  __ load_float(field);
   __ push(ftos);
   // Rewrite bytecode to be faster
   if (!is_static && rc == may_rewrite) {
     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);

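The deleted x86_32 ltos sequence (fild_d/fistp_d through the FPU) does not disappear: an IN_HEAP | MO_RELAXED load of T_LONG with dst == noreg is expected to produce the same atomic load inside the barrier-set assembler, leaving the result in the rax:rdx tos pair. A hypothetical sketch of that backend case; the names and structure are assumed, only the 32-bit instruction sequence is taken from the code removed above:

    #define __ masm->
    // Hypothetical: how a barrier-set assembler might handle a T_LONG load.
    // dst == noreg means "deliver the value in the ltos registers".
    void BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators,
                                      BasicType type, Register dst, Address src,
                                      Register tmp1, Register thread_tmp) {
      bool atomic = (decorators & MO_RELAXED) != 0;
      switch (type) {
      case T_LONG:
    #ifdef _LP64
        __ movq(dst == noreg ? rax : dst, src);
    #else
        if (atomic) {
          __ fild_d(src);                  // must load the 64-bit value atomically
          __ subptr(rsp, 2 * wordSize);    // make space for the store-back
          __ fistp_d(Address(rsp, 0));
          __ pop(rax);                     // low word
          __ pop(rdx);                     // high word
        } else {
          __ movl(rax, src);
          __ movl(rdx, src.plus_disp(wordSize));
        }
    #endif
        break;
      default:
        Unimplemented();                   // other types elided in this sketch
      }
    }
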
@@ -2987,11 +2969,11 @@
 #ifdef ASSERT
   __ cmpl(flags, dtos);
   __ jcc(Assembler::notEqual, notDouble);
 #endif
   // dtos
-  __ load_double(field);
+  __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
   __ push(dtos);
   // Rewrite bytecode to be faster
   if (!is_static && rc == may_rewrite) {
     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
   }

@@ -3146,12 +3128,11 @@
 
   // btos
   {
     __ pop(btos);
     if (!is_static) pop_and_check_object(obj);
-    __ resolve_for_write(OOP_NOT_NULL, obj);
-    __ movb(field, rax);
+    __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
     if (!is_static && rc == may_rewrite) {
       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
     }
     __ jmp(Done);
   }

@@ -3162,13 +3143,11 @@
 
   // ztos
   {
     __ pop(ztos);
     if (!is_static) pop_and_check_object(obj);
-    __ resolve_for_write(OOP_NOT_NULL, obj);
-    __ andl(rax, 0x1);
-    __ movb(field, rax);
+    __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
     if (!is_static && rc == may_rewrite) {
       patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
     }
     __ jmp(Done);
   }

@@ -3179,11 +3158,10 @@
 
   // atos
   {
     __ pop(atos);
     if (!is_static) pop_and_check_object(obj);
-    __ resolve_for_write(OOP_NOT_NULL, obj);
     // Store into the field
     do_oop_store(_masm, field, rax);
     if (!is_static && rc == may_rewrite) {
       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
     }

@@ -3196,12 +3174,11 @@
 
   // itos
   {
     __ pop(itos);
     if (!is_static) pop_and_check_object(obj);
-    __ resolve_for_write(OOP_NOT_NULL, obj);
-    __ movl(field, rax);
+    __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
     if (!is_static && rc == may_rewrite) {
       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
     }
     __ jmp(Done);
   }

@@ -3212,12 +3189,11 @@
 
   // ctos
   {
     __ pop(ctos);
     if (!is_static) pop_and_check_object(obj);
-    __ resolve_for_write(OOP_NOT_NULL, obj);
-    __ movw(field, rax);
+    __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
     if (!is_static && rc == may_rewrite) {
       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
     }
     __ jmp(Done);
   }

@@ -3228,12 +3204,11 @@
 
   // stos
   {
     __ pop(stos);
     if (!is_static) pop_and_check_object(obj);
-    __ resolve_for_write(OOP_NOT_NULL, obj);
-    __ movw(field, rax);
+    __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
     if (!is_static && rc == may_rewrite) {
       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
     }
     __ jmp(Done);
   }

@@ -3245,12 +3220,11 @@
   // ltos
 #ifdef _LP64
   {
     __ pop(ltos);
     if (!is_static) pop_and_check_object(obj);
-    __ resolve_for_write(OOP_NOT_NULL, obj);
-    __ movq(field, rax);
+    __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
     if (!is_static && rc == may_rewrite) {
       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
     }
     __ jmp(Done);
   }

@@ -3262,27 +3236,23 @@
 
     __ pop(ltos);  // overwrites rdx, do this after testing volatile.
     if (!is_static) pop_and_check_object(obj);
 
     // Replace with real volatile test
-    __ push(rdx);
-    __ push(rax);                 // Must update atomically with FIST
-    __ fild_d(Address(rsp,0));    // So load into FPU register
-    __ fistp_d(field);            // and put into memory atomically
-    __ addptr(rsp, 2*wordSize);
+    __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos */, noreg, noreg);
     // volatile_barrier();
     volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                                  Assembler::StoreStore));
     // Don't rewrite volatile version
     __ jmp(notVolatile);
 
     __ bind(notVolatileLong);
 
     __ pop(ltos);  // overwrites rdx
     if (!is_static) pop_and_check_object(obj);
-    __ movptr(hi, rdx);
-    __ movptr(field, rax);
+
+    __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos */, noreg, noreg);
     // Don't rewrite to _fast_lputfield for potential volatile case.
     __ jmp(notVolatile);
   }
 #endif // _LP64
 

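Note that the IN_HEAP | MO_RELAXED store above only asks for an atomic 64-bit access; the ordering required by a volatile field is still provided by the explicit volatile_barrier(StoreLoad | StoreStore) call that the template keeps. The helper's assumed definition is trivial on x86:

    // Assumed definition of the helper used in the volatile path above.
    void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint) {
      // Emit a memory barrier with the requested ordering constraint.
      __ membar(order_constraint);
    }
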
@@ -3292,12 +3262,11 @@
 
   // ftos
   {
     __ pop(ftos);
     if (!is_static) pop_and_check_object(obj);
-    __ resolve_for_write(OOP_NOT_NULL, obj);
-    __ store_float(field);
+    __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
     if (!is_static && rc == may_rewrite) {
       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
     }
     __ jmp(Done);
   }

@@ -3310,12 +3279,11 @@
 
   // dtos
   {
     __ pop(dtos);
     if (!is_static) pop_and_check_object(obj);
-    __ resolve_for_write(OOP_NOT_NULL, obj);
-    __ store_double(field);
+    __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
     if (!is_static && rc == may_rewrite) {
       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
     }
   }
 

@@ -3433,11 +3401,10 @@
   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
   __ andl(rdx, 0x1);
 
   // Get object from stack
   pop_and_check_object(rcx);
-  __ resolve_for_write(OOP_NOT_NULL, rcx);
 
   // field address
   const Address field(rcx, rbx, Address::times_1);
 
   // access field

@@ -3445,34 +3412,35 @@
   case Bytecodes::_fast_aputfield:
     do_oop_store(_masm, field, rax);
     break;
   case Bytecodes::_fast_lputfield:
 #ifdef _LP64
-  __ movq(field, rax);
+    __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
 #else
   __ stop("should not be rewritten");
 #endif
     break;
   case Bytecodes::_fast_iputfield:
-    __ movl(field, rax);
+    __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
     break;
   case Bytecodes::_fast_zputfield:
-    __ andl(rax, 0x1);  // boolean is true if LSB is 1
-    // fall through to bputfield
+    __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
+    break;
   case Bytecodes::_fast_bputfield:
-    __ movb(field, rax);
+    __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
     break;
   case Bytecodes::_fast_sputfield:
-    // fall through
+    __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
+    break;
   case Bytecodes::_fast_cputfield:
-    __ movw(field, rax);
+    __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
     break;
   case Bytecodes::_fast_fputfield:
-    __ store_float(field);
+    __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
     break;
   case Bytecodes::_fast_dputfield:
-    __ store_double(field);
+    __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
     break;
   default:
     ShouldNotReachHere();
   }
 

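A reading aid for the noreg /* ftos */ and /* dtos */ arguments in the stores above: float and double values are not carried in a general-purpose register but in the interpreter's tos cache (xmm0 when SSE is available, otherwise the x87 stack), which is exactly what the retired load_float/store_double helpers encoded. Their assumed definitions, shown only to document where the value actually lives:

    // Assumed helpers from macroAssembler_x86.cpp; a barrier-set assembler is expected
    // to emit equivalent code when it sees noreg for an ftos/dtos access.
    void MacroAssembler::load_float(Address src) {
      if (UseSSE >= 1) {
        movflt(xmm0, src);      // ftos value lives in xmm0
      } else {
        fld_s(src);             // ... or on top of the x87 stack (32-bit only)
      }
    }

    void MacroAssembler::store_double(Address dst) {
      if (UseSSE >= 2) {
        movdbl(dst, xmm0);      // dtos value lives in xmm0
      } else {
        fstp_d(dst);            // ... or on top of the x87 stack (32-bit only)
      }
    }
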
@@ -3525,43 +3493,42 @@
                                   ConstantPoolCacheEntry::f2_offset())));
 
   // rax: object
   __ verify_oop(rax);
   __ null_check(rax);
-  __ resolve_for_read(OOP_NOT_NULL, rax);
   Address field(rax, rbx, Address::times_1);
 
   // access field
   switch (bytecode()) {
   case Bytecodes::_fast_agetfield:
     do_oop_load(_masm, field, rax);
     __ verify_oop(rax);
     break;
   case Bytecodes::_fast_lgetfield:
 #ifdef _LP64
-  __ movq(rax, field);
+    __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
 #else
   __ stop("should not be rewritten");
 #endif
     break;
   case Bytecodes::_fast_igetfield:
-    __ movl(rax, field);
+    __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
     break;
   case Bytecodes::_fast_bgetfield:
-    __ movsbl(rax, field);
+    __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
     break;
   case Bytecodes::_fast_sgetfield:
-    __ load_signed_short(rax, field);
+    __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
     break;
   case Bytecodes::_fast_cgetfield:
-    __ load_unsigned_short(rax, field);
+    __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
     break;
   case Bytecodes::_fast_fgetfield:
-    __ load_float(field);
+    __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
     break;
   case Bytecodes::_fast_dgetfield:
-    __ load_double(field);
+    __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
     break;
   default:
     ShouldNotReachHere();
   }
   // [jk] not needed currently

@@ -3587,22 +3554,21 @@
                              ConstantPoolCacheEntry::f2_offset())));
   // make sure exception is reported in correct bcp range (getfield is
   // next instruction)
   __ increment(rbcp);
   __ null_check(rax);
-  __ resolve_for_read(OOP_NOT_NULL, rax);
   const Address field = Address(rax, rbx, Address::times_1, 0*wordSize);
   switch (state) {
   case itos:
-    __ movl(rax, field);
+    __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
     break;
   case atos:
     do_oop_load(_masm, field, rax);
     __ verify_oop(rax);
     break;
   case ftos:
-    __ load_float(field);
+    __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
     break;
   default:
     ShouldNotReachHere();
   }
 