564 __ mov(tmp2, ShenandoahHeap::in_cset_fast_test_addr());
565 __ lsr(tmp1, res, ShenandoahHeapRegion::region_size_bytes_shift_jint());
566 __ ldrb(tmp2, Address(tmp2, tmp1));
567 __ cbz(tmp2, *stub->continuation());
568
569 // Check if object is already forwarded.
570 Label slow_path;
571 __ ldr(tmp1, Address(res, oopDesc::mark_offset_in_bytes()));
572 __ eon(tmp1, tmp1, zr);
573 __ ands(zr, tmp1, markWord::lock_mask_in_place);
574 __ br(Assembler::NE, slow_path);
575
576 // Decode forwarded object.
577 __ orr(tmp1, tmp1, markWord::marked_value);
578 __ eon(res, tmp1, zr);
579 __ b(*stub->continuation());
580
581 __ bind(slow_path);
582 ce->store_parameter(res, 0);
583 ce->store_parameter(addr, 1);
584 __ far_call(RuntimeAddress(bs->load_reference_barrier_rt_code_blob()->code_begin()));
585
586 __ b(*stub->continuation());
587 }
588
589 #undef __
590
591 #define __ sasm->
592
593 void ShenandoahBarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm) {
// C1 slow-path runtime stub for the Shenandoah SATB (pre-write) barrier.
// Tries to enqueue the previous field value into the thread-local SATB mark
// queue; if the queue is full (index == 0) it falls back to a leaf VM call.
594 __ prologue("shenandoah_pre_barrier", false);
595 
596 // arg0 : previous value of memory
597 
598 BarrierSet* bs = BarrierSet::barrier_set();
599 
600 const Register pre_val = r0;
601 const Register thread = rthread;
602 const Register tmp = rscratch1;
603 
// Thread-local SATB queue index (bytes remaining in the enqueue buffer).
604 Address queue_index(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_index_offset()));
// NOTE(review): original file lines 605-618 are missing from this chunk.
// They presumably declare `buffer` (the SATB queue buffer address), the
// `done`/`runtime` labels, check whether marking is active, and load the
// queue index into `tmp` -- TODO confirm against the full file.
// Index of zero means the buffer is full: take the runtime slow path.
619 __ cbz(tmp, runtime);
620 
// Bump the index down one word and store the new value back.
621 __ sub(tmp, tmp, wordSize);
622 __ str(tmp, queue_index);
// Compute the slot address: buffer base + new index.
623 __ ldr(rscratch2, buffer);
624 __ add(tmp, tmp, rscratch2);
// Store the previous value (stub arg0) into the queue slot.
625 __ load_parameter(0, rscratch2);
626 __ str(rscratch2, Address(tmp, 0));
627 __ b(done);
628 
// Slow path: queue full -- call into the VM to record the previous value.
629 __ bind(runtime);
630 __ push_call_clobbered_registers();
631 __ load_parameter(0, pre_val);
632 __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::write_ref_field_pre_entry), pre_val, thread);
633 __ pop_call_clobbered_registers();
634 __ bind(done);
635 
636 __ epilogue();
637 }
638
639 void ShenandoahBarrierSetAssembler::generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm) {
640 __ prologue("shenandoah_load_reference_barrier", false);
641 // arg0 : object to be resolved
642
643 __ push_call_clobbered_registers();
644 __ load_parameter(0, r0);
645 __ load_parameter(1, r1);
646 if (UseCompressedOops) {
647 __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_narrow));
648 } else {
649 __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier));
650 }
651 __ blr(lr);
652 __ mov(rscratch1, r0);
653 __ pop_call_clobbered_registers();
654 __ mov(r0, rscratch1);
655
656 __ epilogue();
657 }
658
659 #undef __
660
661 #endif // COMPILER1
662
663 address ShenandoahBarrierSetAssembler::shenandoah_lrb() {
664 assert(_shenandoah_lrb != NULL, "need load reference barrier stub");
665 return _shenandoah_lrb;
666 }
|
564 __ mov(tmp2, ShenandoahHeap::in_cset_fast_test_addr());
565 __ lsr(tmp1, res, ShenandoahHeapRegion::region_size_bytes_shift_jint());
566 __ ldrb(tmp2, Address(tmp2, tmp1));
567 __ cbz(tmp2, *stub->continuation());
568
569 // Check if object is already forwarded.
570 Label slow_path;
571 __ ldr(tmp1, Address(res, oopDesc::mark_offset_in_bytes()));
572 __ eon(tmp1, tmp1, zr);
573 __ ands(zr, tmp1, markWord::lock_mask_in_place);
574 __ br(Assembler::NE, slow_path);
575
576 // Decode forwarded object.
577 __ orr(tmp1, tmp1, markWord::marked_value);
578 __ eon(res, tmp1, zr);
579 __ b(*stub->continuation());
580
581 __ bind(slow_path);
582 ce->store_parameter(res, 0);
583 ce->store_parameter(addr, 1);
584 if (stub->is_native()) {
585 __ far_call(RuntimeAddress(bs->load_reference_barrier_native_rt_code_blob()->code_begin()));
586 } else {
587 __ far_call(RuntimeAddress(bs->load_reference_barrier_rt_code_blob()->code_begin()));
588 }
589
590 __ b(*stub->continuation());
591 }
592
593 #undef __
594
595 #define __ sasm->
596
597 void ShenandoahBarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm) {
// C1 slow-path runtime stub for the Shenandoah SATB (pre-write) barrier.
// Tries to enqueue the previous field value into the thread-local SATB mark
// queue; if the queue is full (index == 0) it falls back to a leaf VM call.
598 __ prologue("shenandoah_pre_barrier", false);
599 
600 // arg0 : previous value of memory
601 
602 BarrierSet* bs = BarrierSet::barrier_set();
603 
604 const Register pre_val = r0;
605 const Register thread = rthread;
606 const Register tmp = rscratch1;
607 
// Thread-local SATB queue index (bytes remaining in the enqueue buffer).
608 Address queue_index(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_index_offset()));
// NOTE(review): original file lines 609-622 are missing from this chunk.
// They presumably declare `buffer` (the SATB queue buffer address), the
// `done`/`runtime` labels, check whether marking is active, and load the
// queue index into `tmp` -- TODO confirm against the full file.
// Index of zero means the buffer is full: take the runtime slow path.
623 __ cbz(tmp, runtime);
624 
// Bump the index down one word and store the new value back.
625 __ sub(tmp, tmp, wordSize);
626 __ str(tmp, queue_index);
// Compute the slot address: buffer base + new index.
627 __ ldr(rscratch2, buffer);
628 __ add(tmp, tmp, rscratch2);
// Store the previous value (stub arg0) into the queue slot.
629 __ load_parameter(0, rscratch2);
630 __ str(rscratch2, Address(tmp, 0));
631 __ b(done);
632 
// Slow path: queue full -- call into the VM to record the previous value.
633 __ bind(runtime);
634 __ push_call_clobbered_registers();
635 __ load_parameter(0, pre_val);
636 __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::write_ref_field_pre_entry), pre_val, thread);
637 __ pop_call_clobbered_registers();
638 __ bind(done);
639 
640 __ epilogue();
641 }
642
643 void ShenandoahBarrierSetAssembler::generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm, bool is_native) {
644 __ prologue("shenandoah_load_reference_barrier", false);
645 // arg0 : object to be resolved
646
647 __ push_call_clobbered_registers();
648 __ load_parameter(0, r0);
649 __ load_parameter(1, r1);
650 if (is_native) {
651 __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_native));
652 } else if (UseCompressedOops) {
653 __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_narrow));
654 } else {
655 __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier));
656 }
657 __ blr(lr);
658 __ mov(rscratch1, r0);
659 __ pop_call_clobbered_registers();
660 __ mov(r0, rscratch1);
661
662 __ epilogue();
663 }
664
665 #undef __
666
667 #endif // COMPILER1
668
669 address ShenandoahBarrierSetAssembler::shenandoah_lrb() {
670 assert(_shenandoah_lrb != NULL, "need load reference barrier stub");
671 return _shenandoah_lrb;
672 }
|