< prev index next >

src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.hpp

Print this page

        

*** 41,51 ****
  virtual void arraycopy_epilogue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop, Register start, Register end, Register tmp, RegSet saved_regs) {}
  virtual void load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Register dst, Address src, Register tmp1, Register tmp_thread);
  virtual void store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
!                       Address dst, Register val, Register tmp1, Register tmp2);
  virtual void obj_equals(MacroAssembler* masm, Register obj1, Register obj2);

  virtual void resolve(MacroAssembler* masm, DecoratorSet decorators, Register obj) {
--- 41,51 ----
  virtual void arraycopy_epilogue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop, Register start, Register end, Register tmp, RegSet saved_regs) {}
  virtual void load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type, Register dst, Address src, Register tmp1, Register tmp_thread);
  virtual void store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
!                       Address dst, Register val, Register tmp1, Register tmp2, Register tmp3 = noreg);
  virtual void obj_equals(MacroAssembler* masm, Register obj1, Register obj2);

  virtual void resolve(MacroAssembler* masm, DecoratorSet decorators, Register obj) {
*** 70,77 ****
--- 70,78 ----
                       int con_size_in_bytes, // object size in bytes if known at compile time
                       Register t1,           // temp register
                       Label& slow_case       // continuation point if fast allocation fails
    );
    virtual void barrier_stubs_init() {}
+   virtual void nmethod_entry_barrier(MacroAssembler* masm);
  };

  #endif // CPU_AARCH64_GC_SHARED_BARRIERSETASSEMBLER_AARCH64_HPP
< prev index next >