504 // If do_load() is true then we have to emit the
505 // load of the previous value; otherwise it has already
506 // been loaded into _pre_val.
507
508 __ bind(*stub->entry());
509
510 assert(stub->pre_val()->is_register(), "Precondition.");
511
512 Register pre_val_reg = stub->pre_val()->as_register();
513
514 if (stub->do_load()) {
515 ce->mem2reg(stub->addr(), stub->pre_val(), T_OBJECT, stub->patch_code(), stub->info(), false /*wide*/, false /*unaligned*/);
516 }
517 __ cbz(pre_val_reg, *stub->continuation());
518 ce->store_parameter(stub->pre_val()->as_register(), 0);
519 __ far_call(RuntimeAddress(bs->pre_barrier_c1_runtime_code_blob()->code_begin()));
520 __ b(*stub->continuation());
521 }
522
void ShenandoahBarrierSetAssembler::gen_load_reference_barrier_stub(LIR_Assembler* ce, ShenandoahLoadReferenceBarrierStub* stub) {
  // C1 slow-path stub for the Shenandoah load-reference barrier:
  // move the loaded oop into the result register, skip the barrier for
  // null, otherwise apply the not-null barrier to the result in place.

  Register obj = stub->obj()->as_register();
  Register res = stub->result()->as_register();

  Label done;

  __ bind(*stub->entry());

  // Make sure the value is in the result register before testing it.
  if (res != obj) {
    __ mov(res, obj);
  }
  // Check for null.
  __ cbz(res, done);

  // Not null: run the barrier; rscratch1 is handed in as a temp.
  load_reference_barrier_not_null(ce->masm(), res, rscratch1);

  __ bind(done);
  __ b(*stub->continuation());
}
543
544 #undef __
545
546 #define __ sasm->
547
void ShenandoahBarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm) {
  // C1 runtime stub for the Shenandoah SATB pre-write barrier: enqueue
  // the previous field value (arg0) on the thread-local SATB buffer,
  // calling into the VM when the buffer is full.
  __ prologue("shenandoah_pre_barrier", false);

  // arg0 : previous value of memory

  BarrierSet* bs = BarrierSet::barrier_set();

  const Register pre_val = r0;
  const Register thread = rthread;
  const Register tmp = rscratch1;

  // Thread-local SATB queue state: current index and buffer base address.
  Address queue_index(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_index_offset()));
  Address buffer(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_buffer_offset()));
  // NOTE(review): the declarations of the 'done'/'runtime' labels and the
  // load of the queue index into tmp are in a portion of this function not
  // visible here — comments below assume the usual SATB enqueue fast path;
  // confirm against the full file.
  // Index == 0 means the buffer is full: fall back to the runtime call.
  __ cbz(tmp, runtime);

  // Fast path: decrement index by one word and store the previous value
  // at buffer + new_index.
  __ sub(tmp, tmp, wordSize);
  __ str(tmp, queue_index);
  __ ldr(rscratch2, buffer);
  __ add(tmp, tmp, rscratch2);
  __ load_parameter(0, rscratch2);
  __ str(rscratch2, Address(tmp, 0));
  __ b(done);

  // Slow path: hand the previous value to the VM.
  __ bind(runtime);
  __ push_call_clobbered_registers();
  __ load_parameter(0, pre_val);
  __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::write_ref_field_pre_entry), pre_val, thread);
  __ pop_call_clobbered_registers();
  __ bind(done);

  __ epilogue();
}
593
594 #undef __
595
596 #endif // COMPILER1
597
address ShenandoahBarrierSetAssembler::shenandoah_lrb() {
  // Entry point of the shared load-reference-barrier stub; asserts that
  // the stub has already been generated.
  assert(_shenandoah_lrb != NULL, "need load reference barrier stub");
  return _shenandoah_lrb;
}
602
603 #define __ cgen->assembler()->
604
605 // Shenandoah load reference barrier.
606 //
607 // Input:
608 // r0: OOP to evacuate. Not null.
609 //
610 // Output:
611 // r0: Pointer to evacuated OOP.
612 //
613 // Trash rscratch1, rscratch2. Preserve everything else.
address ShenandoahBarrierSetAssembler::generate_shenandoah_lrb(StubCodeGenerator* cgen) {
  // See the header comment above: takes the oop in r0 (non-null),
  // returns the (possibly evacuated) oop in r0, trashes rscratch1/2.

  __ align(6);
  StubCodeMark mark(cgen, "StubRoutines", "shenandoah_lrb");
  address start = __ pc();

  Label work, done;
  // Fast path: index the in-cset byte map by region number; if the
  // region holding r0 is not in the collection set, return r0 unchanged.
  __ mov(rscratch2, ShenandoahHeap::in_cset_fast_test_addr());
  __ lsr(rscratch1, r0, ShenandoahHeapRegion::region_size_bytes_shift_jint());
  __ ldrb(rscratch2, Address(rscratch2, rscratch1));
  __ tbnz(rscratch2, 0, work);
  __ ret(lr);
  __ bind(work);

  // In cset: resolve any existing forwarding. If the pointer changed,
  // the object was already forwarded and r0 now holds the result.
  __ mov(rscratch2, r0);
  resolve_forward_pointer_not_null(cgen->assembler(), r0, rscratch1);
  __ cmp(rscratch2, r0);
  __ br(Assembler::NE, done);

  __ enter(); // required for proper stackwalking of RuntimeStub frame

  __ push_call_clobbered_registers();

  // Slow path: call the runtime barrier; argument and result are in r0.
  __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier));
  __ blr(lr);
  // Preserve the result across the register restore.
  __ mov(rscratch1, r0);
  __ pop_call_clobbered_registers();
  __ mov(r0, rscratch1);

  __ leave(); // required for proper stackwalking of RuntimeStub frame
  __ bind(done);
  __ ret(lr);

  return start;
}
649
650 #undef __
651
652 void ShenandoahBarrierSetAssembler::barrier_stubs_init() {
|
504 // If do_load() is true then we have to emit the
505 // load of the previous value; otherwise it has already
506 // been loaded into _pre_val.
507
508 __ bind(*stub->entry());
509
510 assert(stub->pre_val()->is_register(), "Precondition.");
511
512 Register pre_val_reg = stub->pre_val()->as_register();
513
514 if (stub->do_load()) {
515 ce->mem2reg(stub->addr(), stub->pre_val(), T_OBJECT, stub->patch_code(), stub->info(), false /*wide*/, false /*unaligned*/);
516 }
517 __ cbz(pre_val_reg, *stub->continuation());
518 ce->store_parameter(stub->pre_val()->as_register(), 0);
519 __ far_call(RuntimeAddress(bs->pre_barrier_c1_runtime_code_blob()->code_begin()));
520 __ b(*stub->continuation());
521 }
522
void ShenandoahBarrierSetAssembler::gen_load_reference_barrier_stub(LIR_Assembler* ce, ShenandoahLoadReferenceBarrierStub* stub) {
  // C1 slow-path stub for the Shenandoah load-reference barrier.
  // Inline fast checks (null, not in cset, already forwarded) filter out
  // most cases; only the remaining case calls the C1 runtime blob.
  ShenandoahBarrierSetC1* bs = (ShenandoahBarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
  __ bind(*stub->entry());

  Register obj = stub->obj()->as_register();
  Register res = stub->result()->as_register();
  Register tmp1 = stub->tmp1()->as_register();
  Register tmp2 = stub->tmp2()->as_register();

  // The runtime blob returns its result in r0.
  assert(res == r0, "result must arrive in r0");

  if (res != obj) {
    __ mov(res, obj);
  }

  // Check for null.
  __ cbz(res, *stub->continuation());

  // Check for object in cset: index the in-cset byte map by region number.
  __ mov(tmp2, ShenandoahHeap::in_cset_fast_test_addr());
  __ lsr(tmp1, res, ShenandoahHeapRegion::region_size_bytes_shift_jint());
  __ ldrb(tmp2, Address(tmp2, tmp1));
  __ cbz(tmp2, *stub->continuation());

  // Check if object is already forwarded: treated as forwarded when both
  // lock bits (markOopDesc::marked_value) are set in the mark word.
  Label slow_path;
  __ ldr(tmp1, Address(res, oopDesc::mark_offset_in_bytes()));
  __ eon(tmp1, tmp1, zr);                              // tmp1 = ~mark
  __ ands(zr, tmp1, markOopDesc::lock_mask_in_place);  // any lock bit clear?
  __ br(Assembler::NE, slow_path);                     // yes -> not forwarded

  // Decode forwarded object: res = ~(~mark | marked_value), i.e. the
  // mark word with the low lock bits cleared.
  __ orr(tmp1, tmp1, markOopDesc::marked_value);
  __ eon(res, tmp1, zr);
  __ b(*stub->continuation());

  // Slow path: pass the object to the runtime blob; result returns in r0.
  __ bind(slow_path);
  ce->store_parameter(res, 0);
  __ far_call(RuntimeAddress(bs->load_reference_barrier_rt_code_blob()->code_begin()));

  __ b(*stub->continuation());
}
565
566 #undef __
567
568 #define __ sasm->
569
void ShenandoahBarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm) {
  // C1 runtime stub for the Shenandoah SATB pre-write barrier: enqueue
  // the previous field value (arg0) on the thread-local SATB buffer,
  // calling into the VM when the buffer is full.
  __ prologue("shenandoah_pre_barrier", false);

  // arg0 : previous value of memory

  BarrierSet* bs = BarrierSet::barrier_set();

  const Register pre_val = r0;
  const Register thread = rthread;
  const Register tmp = rscratch1;

  // Thread-local SATB queue state: current index and buffer base address.
  Address queue_index(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_index_offset()));
  Address buffer(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_buffer_offset()));
  // NOTE(review): the declarations of the 'done'/'runtime' labels and the
  // load of the queue index into tmp are in a portion of this function not
  // visible here — comments below assume the usual SATB enqueue fast path;
  // confirm against the full file.
  // Index == 0 means the buffer is full: fall back to the runtime call.
  __ cbz(tmp, runtime);

  // Fast path: decrement index by one word and store the previous value
  // at buffer + new_index.
  __ sub(tmp, tmp, wordSize);
  __ str(tmp, queue_index);
  __ ldr(rscratch2, buffer);
  __ add(tmp, tmp, rscratch2);
  __ load_parameter(0, rscratch2);
  __ str(rscratch2, Address(tmp, 0));
  __ b(done);

  // Slow path: hand the previous value to the VM.
  __ bind(runtime);
  __ push_call_clobbered_registers();
  __ load_parameter(0, pre_val);
  __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::write_ref_field_pre_entry), pre_val, thread);
  __ pop_call_clobbered_registers();
  __ bind(done);

  __ epilogue();
}
615
void ShenandoahBarrierSetAssembler::generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm) {
  // C1 runtime stub: resolve/evacuate the object passed as arg0 by
  // calling into ShenandoahRuntime; the result is returned in r0.
  __ prologue("shenandoah_load_reference_barrier", false);
  // arg0 : object to be resolved

  __ push_call_clobbered_registers();
  __ load_parameter(0, r0);
  __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier));
  __ blrt(lr, 1, 0, MacroAssembler::ret_type_integral);
  // Preserve the result (r0) across the register restore.
  __ mov(rscratch1, r0);
  __ pop_call_clobbered_registers();
  __ mov(r0, rscratch1);

  __ epilogue();
}
630
631 #undef __
632
633 #endif // COMPILER1
634
635 address ShenandoahBarrierSetAssembler::shenandoah_lrb() {
636 assert(_shenandoah_lrb != NULL, "need load reference barrier stub");
637 return _shenandoah_lrb;
638 }
639
640 #define __ cgen->assembler()->
641
642 // Shenandoah load reference barrier.
643 //
644 // Input:
645 // r0: OOP to evacuate. Not null.
646 //
647 // Output:
648 // r0: Pointer to evacuated OOP.
649 //
650 // Trash rscratch1, rscratch2. Preserve everything else.
651 address ShenandoahBarrierSetAssembler::generate_shenandoah_lrb(StubCodeGenerator* cgen) {
652
653 __ align(6);
654 StubCodeMark mark(cgen, "StubRoutines", "shenandoah_lrb");
655 address start = __ pc();
656
657 Label work, done;
658 __ mov(rscratch2, ShenandoahHeap::in_cset_fast_test_addr());
659 __ lsr(rscratch1, r0, ShenandoahHeapRegion::region_size_bytes_shift_jint());
660 __ ldrb(rscratch2, Address(rscratch2, rscratch1));
661 __ tbnz(rscratch2, 0, work);
662 __ ret(lr);
663 __ bind(work);
664
665 Label slow_path;
666 __ ldr(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
667 __ eon(rscratch1, rscratch1, zr);
668 __ ands(zr, rscratch1, markOopDesc::lock_mask_in_place);
669 __ br(Assembler::NE, slow_path);
670
671 // Decode forwarded object.
672 __ orr(rscratch1, rscratch1, markOopDesc::marked_value);
673 __ eon(r0, rscratch1, zr);
674 __ ret(lr);
675
676 __ bind(slow_path);
677 __ enter(); // required for proper stackwalking of RuntimeStub frame
678
679 __ push_call_clobbered_registers();
680
681 __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier));
682 __ blr(lr);
683 __ mov(rscratch1, r0);
684 __ pop_call_clobbered_registers();
685 __ mov(r0, rscratch1);
686
687 __ leave(); // required for proper stackwalking of RuntimeStub frame
688 __ bind(done);
689 __ ret(lr);
690
691 return start;
692 }
693
694 #undef __
695
696 void ShenandoahBarrierSetAssembler::barrier_stubs_init() {
|