< prev index next >

src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp

Print this page




  56     } else {
  57       assert(in_native, "why else?");
  58       __ ldr(dst, src);
  59     }
  60     break;
  61   }
  62   case T_BOOLEAN: __ load_unsigned_byte (dst, src); break;
  63   case T_BYTE:    __ load_signed_byte   (dst, src); break;
  64   case T_CHAR:    __ load_unsigned_short(dst, src); break;
  65   case T_SHORT:   __ load_signed_short  (dst, src); break;
  66   case T_INT:     __ ldrw               (dst, src); break;
  67   case T_LONG:    __ ldr                (dst, src); break;
  68   case T_ADDRESS: __ ldr                (dst, src); break;
  69   case T_FLOAT:   __ ldrs               (v0, src);  break;
  70   case T_DOUBLE:  __ ldrd               (v0, src);  break;
  71   default: Unimplemented();
  72   }
  73 }
  74 
  75 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
  76                                    Address dst, Register val, Register tmp1, Register tmp2) {
  77   bool in_heap = (decorators & IN_HEAP) != 0;
  78   bool in_native = (decorators & IN_NATIVE) != 0;
  79   switch (type) {
  80   case T_OBJECT:
  81   case T_ARRAY: {
  82     val = val == noreg ? zr : val;
  83     if (in_heap) {
  84       if (UseCompressedOops) {
  85         assert(!dst.uses(val), "not enough registers");
  86         if (val != zr) {
  87           __ encode_heap_oop(val);
  88         }
  89         __ strw(val, dst);
  90       } else {
  91         __ str(val, dst);
  92       }
  93     } else {
  94       assert(in_native, "why else?");
  95       __ str(val, dst);
  96     }


 212     __ cbnzw(rscratch2, retry);
 213 
 214     incr_allocated_bytes(masm, var_size_in_bytes, con_size_in_bytes, t1);
 215   }
 216 }
 217 
// Bump the current thread's cumulative allocated-bytes counter after a TLAB/
// eden allocation. The increment is taken from var_size_in_bytes when that
// register is valid, otherwise from the compile-time constant
// con_size_in_bytes. t1 is clobbered as a scratch register.
  218 void BarrierSetAssembler::incr_allocated_bytes(MacroAssembler* masm,
  219                                                Register var_size_in_bytes,
  220                                                int con_size_in_bytes,
  221                                                Register t1) {
  222   assert(t1->is_valid(), "need temp reg");
  223 
  224   __ ldr(t1, Address(rthread, in_bytes(JavaThread::allocated_bytes_offset())));  // t1 = thread->allocated_bytes
  225   if (var_size_in_bytes->is_valid()) {
  226     __ add(t1, t1, var_size_in_bytes);  // dynamic size supplied in a register
  227   } else {
  228     __ add(t1, t1, con_size_in_bytes);  // size known at code-emission time
  229   }
  230   __ str(t1, Address(rthread, in_bytes(JavaThread::allocated_bytes_offset())));  // write back updated total
  231 }




















  56     } else {
  57       assert(in_native, "why else?");
  58       __ ldr(dst, src);
  59     }
  60     break;
  61   }
  62   case T_BOOLEAN: __ load_unsigned_byte (dst, src); break;
  63   case T_BYTE:    __ load_signed_byte   (dst, src); break;
  64   case T_CHAR:    __ load_unsigned_short(dst, src); break;
  65   case T_SHORT:   __ load_signed_short  (dst, src); break;
  66   case T_INT:     __ ldrw               (dst, src); break;
  67   case T_LONG:    __ ldr                (dst, src); break;
  68   case T_ADDRESS: __ ldr                (dst, src); break;
  69   case T_FLOAT:   __ ldrs               (v0, src);  break;
  70   case T_DOUBLE:  __ ldrd               (v0, src);  break;
  71   default: Unimplemented();
  72   }
  73 }
  74 
  75 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
  76                                    Address dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
  77   bool in_heap = (decorators & IN_HEAP) != 0;
  78   bool in_native = (decorators & IN_NATIVE) != 0;
  79   switch (type) {
  80   case T_OBJECT:
  81   case T_ARRAY: {
  82     val = val == noreg ? zr : val;
  83     if (in_heap) {
  84       if (UseCompressedOops) {
  85         assert(!dst.uses(val), "not enough registers");
  86         if (val != zr) {
  87           __ encode_heap_oop(val);
  88         }
  89         __ strw(val, dst);
  90       } else {
  91         __ str(val, dst);
  92       }
  93     } else {
  94       assert(in_native, "why else?");
  95       __ str(val, dst);
  96     }


 212     __ cbnzw(rscratch2, retry);
 213 
 214     incr_allocated_bytes(masm, var_size_in_bytes, con_size_in_bytes, t1);
 215   }
 216 }
 217 
// Bump the current thread's cumulative allocated-bytes counter after a TLAB/
// eden allocation. The increment is taken from var_size_in_bytes when that
// register is valid, otherwise from the compile-time constant
// con_size_in_bytes. t1 is clobbered as a scratch register.
  218 void BarrierSetAssembler::incr_allocated_bytes(MacroAssembler* masm,
  219                                                Register var_size_in_bytes,
  220                                                int con_size_in_bytes,
  221                                                Register t1) {
  222   assert(t1->is_valid(), "need temp reg");
  223 
  224   __ ldr(t1, Address(rthread, in_bytes(JavaThread::allocated_bytes_offset())));  // t1 = thread->allocated_bytes
  225   if (var_size_in_bytes->is_valid()) {
  226     __ add(t1, t1, var_size_in_bytes);  // dynamic size supplied in a register
  227   } else {
  228     __ add(t1, t1, con_size_in_bytes);  // size known at code-emission time
  229   }
  230   __ str(t1, Address(rthread, in_bytes(JavaThread::allocated_bytes_offset())));  // write back updated total
  231 }
 232 
// nmethod entry barrier emission for aarch64. Currently a NO-OP: the entire
// body is compiled out via #if 0 pending JDK-8210498 (see DMS CHECK note
// below). The disabled code is a sketch of the intended sequence: load the
// per-thread "disarmed" word and, if it is non-zero, call the method-entry
// barrier stub; NOTE(review): __ blr() here is given a RuntimeAddress rather
// than a register — confirm the intended call idiom before enabling.
  233 void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm)  {
  234 // DMS CHECK: 8210498: nmethod entry barriers is not implemented
  235 #if 0
  236  BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
  237   if (bs_nm == NULL) {
  238     return;
  239   }
  240   Label continuation;
  241   Address disarmed_addr(rthread, in_bytes(bs_nm->thread_disarmed_offset()));
  242   __ align(8);
  243   __ ldr(rscratch1, disarmed_addr);
  244   __ cbz(rscratch1, continuation);  // barrier disarmed: skip the stub call
  245   __ blr(RuntimeAddress(StubRoutines::aarch64::method_entry_barrier()));
  246   __ bind(continuation);
  247 #endif
  248 }
 249 
< prev index next >