< prev index next >

src/cpu/aarch64/vm/shenandoahBarrierSetAssembler_aarch64.cpp

Print this page
rev 11463 : Backport Traversal GC


 103   if (dst != r0) {
 104     __ push(to_save, sp);
 105     __ mov(r0, dst);
 106   }
 107 
 108   __ push_call_clobbered_registers();
 109   __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_interpreter), r0);
 110   __ mov(rscratch1, r0);
 111   __ pop_call_clobbered_registers();
 112   __ mov(r0, rscratch1);
 113 
 114   if (dst != r0) {
 115     __ mov(dst, r0);
 116     __ pop(to_save, sp);
 117   }
 118 
 119   __ bind(done);
 120   __ leave();
 121 }
 122 















 123 void ShenandoahBarrierSetAssembler::load_reference_barrier(MacroAssembler* masm, Register dst) {
 124   if (ShenandoahLoadRefBarrier) {
 125     Label is_null;
 126     __ cbz(dst, is_null);
 127     load_reference_barrier_not_null(masm, dst);
 128     __ bind(is_null);
 129   }
 130 }
 131 
 132 void ShenandoahBarrierSetAssembler::cmpxchg_oop(MacroAssembler* masm, Register addr, Register expected, Register new_val,
 133                                                 bool acquire, bool release, bool weak, bool is_cae,
 134                                                 Register result) {
 135 
 136   Register tmp1 = rscratch1;
 137   Register tmp2 = rscratch2;
 138   bool is_narrow = UseCompressedOops;
 139   Assembler::operand_size size = is_narrow ? Assembler::word : Assembler::xword;
 140 
 141   assert_different_registers(addr, expected, new_val, tmp1, tmp2);
 142 




 103   if (dst != r0) {
 104     __ push(to_save, sp);
 105     __ mov(r0, dst);
 106   }
 107 
 108   __ push_call_clobbered_registers();
 109   __ super_call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_interpreter), r0);
 110   __ mov(rscratch1, r0);
 111   __ pop_call_clobbered_registers();
 112   __ mov(r0, rscratch1);
 113 
 114   if (dst != r0) {
 115     __ mov(dst, r0);
 116     __ pop(to_save, sp);
 117   }
 118 
 119   __ bind(done);
 120   __ leave();
 121 }
 122 
// Store-value barrier: applied to the oop in dst before it is stored into the
// heap; a no-op unless ShenandoahStoreValEnqueueBarrier is set.
// NOTE(review): this reuses the G1 SATB pre-barrier with dst as the pre-value
// (addr = noreg), which presumably enqueues dst into the thread's SATB queue —
// confirm against g1_write_barrier_pre's signature/semantics.
//
//   masm - assembler to emit code into
//   dst  - register holding the oop being stored; saved and restored around
//          the runtime call by the push/pop below
//   tmp  - scratch register handed to the pre-barrier
void ShenandoahBarrierSetAssembler::storeval_barrier(MacroAssembler* masm, Register dst, Register tmp) {
  if (ShenandoahStoreValEnqueueBarrier) {
    // Save possibly live regs. r0..r4 minus dst are spilled; dst itself must
    // survive in place so the barrier sees the store value.
    RegSet live_regs = RegSet::range(r0, r4) - dst;
    __ push(live_regs, sp);
    // v0 is not covered by RegSet; spill it manually. Two word-sized slots
    // keep sp 16-byte aligned as AArch64 requires.
    __ strd(v0, __ pre(sp, 2 * -wordSize));

    // Enqueue dst via the G1-style pre-barrier (no address, value in dst,
    // current thread, tmp as scratch; trailing bools: true, false).
    __ g1_write_barrier_pre(noreg, dst, rthread, tmp, true, false);

    // Restore possibly live regs, mirroring the spill order above.
    __ ldrd(v0, __ post(sp, 2 * wordSize));
    __ pop(live_regs, sp);
  }
}
 137 
// Public load-reference-barrier entry point. When the ShenandoahLoadRefBarrier
// flag is off this emits nothing; otherwise it emits a null check so that only
// non-null oops in dst fall through into the real barrier.
//
//   masm - assembler to emit code into
//   dst  - register holding the (possibly null) oop to be filtered through
//          the barrier; updated in place by the non-null path
void ShenandoahBarrierSetAssembler::load_reference_barrier(MacroAssembler* masm, Register dst) {
  if (ShenandoahLoadRefBarrier) {
    Label is_null;
    __ cbz(dst, is_null);                       // Null oops need no barrier; skip ahead.
    load_reference_barrier_not_null(masm, dst); // Barrier proper, for non-null dst only.
    __ bind(is_null);
  }
}
 146 
 147 void ShenandoahBarrierSetAssembler::cmpxchg_oop(MacroAssembler* masm, Register addr, Register expected, Register new_val,
 148                                                 bool acquire, bool release, bool weak, bool is_cae,
 149                                                 Register result) {
 150 
 151   Register tmp1 = rscratch1;
 152   Register tmp2 = rscratch2;
 153   bool is_narrow = UseCompressedOops;
 154   Assembler::operand_size size = is_narrow ? Assembler::word : Assembler::xword;
 155 
 156   assert_different_registers(addr, expected, new_val, tmp1, tmp2);
 157 


< prev index next >