< prev index next >

src/hotspot/cpu/aarch64/gc/shenandoah/shenandoahBarrierSetAssembler_aarch64.cpp

Print this page
rev 55741 : 8228369: Shenandoah: Refactor LRB C1 stubs


 455   }
 456   resolve_forward_pointer(masm, tmp1);
 457   resolve_forward_pointer(masm, tmp2);
 458   __ cmp(tmp1, tmp2);
 459   // Retry with expected now being the value we just loaded from addr.
 460   __ br(Assembler::EQ, retry);
 461   if (is_cae && is_narrow) {
 462     // For cmp-and-exchange and narrow oops, we need to restore
 463     // the compressed old-value. We moved it to 'expected' a few lines up.
 464     __ mov(tmp1, expected);
 465   }
 466   __ bind(done);
 467 
 468   if (is_cae) {
 469     __ mov(result, tmp1);
 470   } else {
 471     __ cset(result, Assembler::EQ);
 472   }
 473 }
 474 





















 475 #undef __
 476 
 477 #ifdef COMPILER1
 478 
 479 #define __ ce->masm()->
 480 
 481 void ShenandoahBarrierSetAssembler::gen_pre_barrier_stub(LIR_Assembler* ce, ShenandoahPreBarrierStub* stub) {
       // C1 slow path for the SATB pre-write barrier: enqueue the previous
       // field value (pre_val) on the thread's SATB queue by tail-calling the
       // shared pre-barrier runtime blob, then resume at continuation().
 482   ShenandoahBarrierSetC1* bs = (ShenandoahBarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
 483   // At this point we know that marking is in progress.
 484   // If do_load() is true then we have to emit the
 485   // load of the previous value; otherwise it has already
 486   // been loaded into _pre_val.
 487 
 488   __ bind(*stub->entry());
 489 
 490   assert(stub->pre_val()->is_register(), "Precondition.");
 491 
 492   Register pre_val_reg = stub->pre_val()->as_register();
 493 
 494   if (stub->do_load()) {
 495     ce->mem2reg(stub->addr(), stub->pre_val(), T_OBJECT, stub->patch_code(), stub->info(), false /*wide*/, false /*unaligned*/);
 496   }
       // A null previous value never needs to be SATB-marked; skip the enqueue.
 497   __ cbz(pre_val_reg, *stub->continuation());
       // The runtime blob reads pre_val as stack parameter 0.
 498   ce->store_parameter(stub->pre_val()->as_register(), 0);
 499   __ far_call(RuntimeAddress(bs->pre_barrier_c1_runtime_code_blob()->code_begin()));
 500   __ b(*stub->continuation());
 501 }
 502 
 503 void ShenandoahBarrierSetAssembler::gen_load_reference_barrier_stub(LIR_Assembler* ce, ShenandoahLoadReferenceBarrierStub* stub) {
       // C1 slow path for the load-reference barrier (pre-refactor shape):
       // copy obj into the result register, then resolve/evacuate it in place
       // via the shared assembler helper. Null oops pass through unchanged.


 504 
 505   Register obj = stub->obj()->as_register();
 506   Register res = stub->result()->as_register();
 507 
 508   Label done;
 509 
 510   __ bind(*stub->entry());
 511 
 512   if (res != obj) {
 513     __ mov(res, obj);
 514   }

 515   // Check for null.
 516   __ cbz(res, done);
 517 
       // Updates res in place; trashes rscratch1.
 518   load_reference_barrier_not_null(ce->masm(), res, rscratch1);

 519 
 520   __ bind(done);
 521   __ b(*stub->continuation());
 522 }
 523 
 524 #undef __
 525 
 526 #define __ sasm->
 527 
 528 void ShenandoahBarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm) {
       // Shared C1 runtime blob for the SATB pre-barrier. Fast path pushes
       // pre_val into the thread-local SATB buffer; slow path (buffer full
       // or marking inactive check, elided below) calls into the VM.
 529   __ prologue("shenandoah_pre_barrier", false);
 530 
 531   // arg0 : previous value of memory
 532 
 533   BarrierSet* bs = BarrierSet::barrier_set();
 534 
 535   const Register pre_val = r0;
 536   const Register thread = rthread;
 537   const Register tmp = rscratch1;
 538 
 539   Address queue_index(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_index_offset()));
 540   Address buffer(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_buffer_offset()));

       // NOTE(review): original lines 541-553 are elided in this webrev view;
       // presumably they declare the done/runtime labels, test the SATB-active
       // flag, and load queue_index into tmp — confirm against the full file.
 554   __ cbz(tmp, runtime);
 555 
       // Queue index counts down; zero (checked above) means buffer is full.
 556   __ sub(tmp, tmp, wordSize);
 557   __ str(tmp, queue_index);
 558   __ ldr(rscratch2, buffer);
 559   __ add(tmp, tmp, rscratch2);
 560   __ load_parameter(0, rscratch2);
 561   __ str(rscratch2, Address(tmp, 0));
 562   __ b(done);
 563 
 564   __ bind(runtime);
 565   __ push_call_clobbered_registers();
 566   __ load_parameter(0, pre_val);
 567   __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::write_ref_field_pre_entry), pre_val, thread);
 568   __ pop_call_clobbered_registers();
 569   __ bind(done);
 570 
 571   __ epilogue();
 572 }
 573 


























 574 #undef __
 575 
 576 #endif // COMPILER1
 577 
 578 address ShenandoahBarrierSetAssembler::shenandoah_lrb() {
       // Accessor for the shared LRB stub generated in barrier_stubs_init().
 579   assert(_shenandoah_lrb != NULL, "need load reference barrier stub");
 580   return _shenandoah_lrb;
 581 }
 582 
 583 #define __ cgen->assembler()->
 584 
 585 // Shenandoah load reference barrier.
 586 //
 587 // Input:
 588 //   r0: OOP to evacuate.  Not null.
 589 //
 590 // Output:
 591 //   r0: Pointer to evacuated OOP.
 592 //
 593 // Trash rscratch1, rscratch2.  Preserve everything else.
 594 address ShenandoahBarrierSetAssembler::generate_shenandoah_lrb(StubCodeGenerator* cgen) {
 595 
 596   __ align(6);
 597   StubCodeMark mark(cgen, "StubRoutines", "shenandoah_lrb");
 598   address start = __ pc();
 599 
 600   Label work, done;
       // Fast path: index the one-byte-per-region cset map by the oop's
       // region number; flag bit clear means not in cset, return unchanged.
 601   __ mov(rscratch2, ShenandoahHeap::in_cset_fast_test_addr());
 602   __ lsr(rscratch1, r0, ShenandoahHeapRegion::region_size_bytes_shift_jint());
 603   __ ldrb(rscratch2, Address(rscratch2, rscratch1));
 604   __ tbnz(rscratch2, 0, work);
 605   __ ret(lr);
 606   __ bind(work);
 607 
       // In cset: if the oop is already forwarded, r0 now holds the
       // forwardee and we are done.
 608   __ mov(rscratch2, r0);
 609   resolve_forward_pointer_not_null(cgen->assembler(), r0, rscratch1);
 610   __ cmp(rscratch2, r0);
 611   __ br(Assembler::NE, done);
 612 

       // Not yet forwarded: evacuate via the runtime; r0 carries both the
       // argument and the result.
 613   __ enter(); // required for proper stackwalking of RuntimeStub frame
 614 
 615   __ push_call_clobbered_registers();
 616 
 617   __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier));
 618   __ blrt(lr, 1, 0, MacroAssembler::ret_type_integral);
       // Stash the returned oop across the register restore.
 619   __ mov(rscratch1, r0);
 620   __ pop_call_clobbered_registers();
 621   __ mov(r0, rscratch1);
 622 
 623   __ leave(); // required for proper stackwalking of RuntimeStub frame
 624   __ bind(done);
 625   __ ret(lr);
 626 
 627   return start;
 628 }
 629 
 630 #undef __
 631 
 632 void ShenandoahBarrierSetAssembler::barrier_stubs_init() {
       // One-time VM-init hook: materialize the shared LRB stub into a
       // dedicated buffer blob, only when the barrier is enabled.
 633   if (ShenandoahLoadRefBarrier) {
 634     int stub_code_size = 2048;
 635     ResourceMark rm;
 636     BufferBlob* bb = BufferBlob::create("shenandoah_barrier_stubs", stub_code_size);
 637     CodeBuffer buf(bb);
 638     StubCodeGenerator cgen(&buf);
 639     _shenandoah_lrb = generate_shenandoah_lrb(&cgen);
 640   }
 641 }


 455   }
 456   resolve_forward_pointer(masm, tmp1);
 457   resolve_forward_pointer(masm, tmp2);
 458   __ cmp(tmp1, tmp2);
 459   // Retry with expected now being the value we just loaded from addr.
 460   __ br(Assembler::EQ, retry);
 461   if (is_cae && is_narrow) {
 462     // For cmp-and-exchange and narrow oops, we need to restore
 463     // the compressed old-value. We moved it to 'expected' a few lines up.
 464     __ mov(tmp1, expected);
 465   }
 466   __ bind(done);
 467 
 468   if (is_cae) {
 469     __ mov(result, tmp1);
 470   } else {
 471     __ cset(result, Assembler::EQ);
 472   }
 473 }
 474 
 475 // Generate cset check. If obj is in cset, branch to in_cset label, otherwise fall through
 476 // obj: Register holding the oop, preserved
 477 // tmp1, tmp2: temp registers, trashed
 478 void ShenandoahBarrierSetAssembler::gen_cset_check(MacroAssembler* masm, Register obj, Register tmp1, Register tmp2, Label& in_cset) {
       // The cset map holds one byte per heap region: shift the oop down to
       // its region index, load that byte, and test the in-cset flag bit.
 479   __ mov(tmp2, ShenandoahHeap::in_cset_fast_test_addr());
 480   __ lsr(tmp1, obj, ShenandoahHeapRegion::region_size_bytes_shift_jint());
 481   __ ldrb(tmp2, Address(tmp2, tmp1));
 482   __ tbnz(tmp2, 0, in_cset);
 483 }
 484 
 485 // Generate check if object is resolved. Branch to not_resolved label, if not. Otherwise return resolved
 486 // object in obj register.
 487 // obj: object, resolved object on normal return
 488 // tmp1, tmp2: temp registers, trashed
 489 void ShenandoahBarrierSetAssembler::gen_resolved_check(MacroAssembler* masm, Register obj, Register tmp1, Register tmp2, Label& resolved) {
 490   __ mov(tmp2, obj);
 491   resolve_forward_pointer_not_null(masm, obj, tmp1);
 492   __ cmp(tmp2, obj);
 493   __ br(Assembler::EQ, resolved);
 494 }
 495 
 496 #undef __
 497 
 498 #ifdef COMPILER1
 499 
 500 #define __ ce->masm()->
 501 
 502 void ShenandoahBarrierSetAssembler::gen_pre_barrier_stub(LIR_Assembler* ce, ShenandoahPreBarrierStub* stub) {
       // C1 slow path for the SATB pre-write barrier: enqueue the previous
       // field value (pre_val) on the thread's SATB queue by tail-calling the
       // shared pre-barrier runtime blob, then resume at continuation().
 503   ShenandoahBarrierSetC1* bs = (ShenandoahBarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
 504   // At this point we know that marking is in progress.
 505   // If do_load() is true then we have to emit the
 506   // load of the previous value; otherwise it has already
 507   // been loaded into _pre_val.
 508 
 509   __ bind(*stub->entry());
 510 
 511   assert(stub->pre_val()->is_register(), "Precondition.");
 512 
 513   Register pre_val_reg = stub->pre_val()->as_register();
 514 
 515   if (stub->do_load()) {
 516     ce->mem2reg(stub->addr(), stub->pre_val(), T_OBJECT, stub->patch_code(), stub->info(), false /*wide*/, false /*unaligned*/);
 517   }
       // A null previous value never needs to be SATB-marked; skip the enqueue.
 518   __ cbz(pre_val_reg, *stub->continuation());
       // The runtime blob reads pre_val as stack parameter 0.
 519   ce->store_parameter(stub->pre_val()->as_register(), 0);
 520   __ far_call(RuntimeAddress(bs->pre_barrier_c1_runtime_code_blob()->code_begin()));
 521   __ b(*stub->continuation());
 522 }
 523 
 524 void ShenandoahBarrierSetAssembler::gen_load_reference_barrier_stub(LIR_Assembler* ce, ShenandoahLoadReferenceBarrierStub* stub) {
 525   ShenandoahBarrierSetC1* bs = (ShenandoahBarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
 526   __ bind(*stub->entry());
 527 
 528   Register obj = stub->obj()->as_register();
 529   Register res = stub->result()->as_register();
 530    assert(res == r0, "result must arrive in r0");



 531 
 532   if (res != obj) {
 533     __ mov(res, obj);
 534   }
 535 
 536   // Check for null.
 537   __ cbz(res, *stub->continuation());
 538 
 539   ce->store_parameter(res, 0);
 540   __ far_call(RuntimeAddress(bs->load_reference_barrier_rt_code_blob()->code_begin()));
 541 

 542   __ b(*stub->continuation());
 543 }
 544 
 545 #undef __
 546 
 547 #define __ sasm->
 548 
 549 void ShenandoahBarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm) {
       // Shared C1 runtime blob for the SATB pre-barrier. Fast path pushes
       // pre_val into the thread-local SATB buffer; slow path (buffer full
       // or marking inactive check, elided below) calls into the VM.
 550   __ prologue("shenandoah_pre_barrier", false);
 551 
 552   // arg0 : previous value of memory
 553 
 554   BarrierSet* bs = BarrierSet::barrier_set();
 555 
 556   const Register pre_val = r0;
 557   const Register thread = rthread;
 558   const Register tmp = rscratch1;
 559 
 560   Address queue_index(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_index_offset()));
 561   Address buffer(thread, in_bytes(ShenandoahThreadLocalData::satb_mark_queue_buffer_offset()));

       // NOTE(review): original lines 562-574 are elided in this webrev view;
       // presumably they declare the done/runtime labels, test the SATB-active
       // flag, and load queue_index into tmp — confirm against the full file.
 575   __ cbz(tmp, runtime);
 576 
       // Queue index counts down; zero (checked above) means buffer is full.
 577   __ sub(tmp, tmp, wordSize);
 578   __ str(tmp, queue_index);
 579   __ ldr(rscratch2, buffer);
 580   __ add(tmp, tmp, rscratch2);
 581   __ load_parameter(0, rscratch2);
 582   __ str(rscratch2, Address(tmp, 0));
 583   __ b(done);
 584 
 585   __ bind(runtime);
 586   __ push_call_clobbered_registers();
 587   __ load_parameter(0, pre_val);
 588   __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahRuntime::write_ref_field_pre_entry), pre_val, thread);
 589   __ pop_call_clobbered_registers();
 590   __ bind(done);
 591 
 592   __ epilogue();
 593 }
 594 
 595 void ShenandoahBarrierSetAssembler::generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm) {
       // Shared C1 runtime blob for the load-reference barrier. Three exits,
       // each through its own epilogue, with the resolved oop left in r0:
       //   1. not in cset            -> return the oop unchanged
       //   2. in cset, forwarded     -> return the forwardee
       //   3. in cset, not forwarded -> evacuate via the runtime
 596   __ prologue("shenandoah_load_reference_barrier", false);
 597   // arg0 : object to be resolved
 598 
 599   Label work, call_rt;
 600   __ load_parameter(0, r0);
 601   gen_cset_check(sasm, r0, rscratch1, rscratch2, work);
 602   __ epilogue();
 603 
 604   __ bind(work);
       // Falls through with the forwardee in r0 when already resolved.
 605   gen_resolved_check(sasm, r0, rscratch1, rscratch2, call_rt);
 606   __ epilogue();
 607 
 608   __ bind(call_rt);
 609 
 610   __ push_call_clobbered_registers();
 611 
 612   __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier));
 613   __ blrt(lr, 1, 0, MacroAssembler::ret_type_integral);
       // Stash the returned oop across the register restore.
 614   __ mov(rscratch1, r0);
 615   __ pop_call_clobbered_registers();
 616   __ mov(r0, rscratch1);
 617 
 618   __ epilogue();
 619 }
 620 
 621 #undef __
 622 
 623 #endif // COMPILER1
 624 
 625 address ShenandoahBarrierSetAssembler::shenandoah_lrb() {
 626   assert(_shenandoah_lrb != NULL, "need load reference barrier stub");
 627   return _shenandoah_lrb;
 628 }
 629 
 630 #define __ cgen->assembler()->
 631 
 632 // Shenandoah load reference barrier.
 633 //
 634 // Input:
 635 //   r0: OOP to evacuate.  Not null.
 636 //
 637 // Output:
 638 //   r0: Pointer to evacuated OOP.
 639 //
 640 // Trash rscratch1, rscratch2.  Preserve everything else.
 641 address ShenandoahBarrierSetAssembler::generate_shenandoah_lrb(StubCodeGenerator* cgen) {
 642 
 643   __ align(6);
 644   StubCodeMark mark(cgen, "StubRoutines", "shenandoah_lrb");
 645   address start = __ pc();
 646 
 647   Label work, call_rt;
       // Fast path 1: not in cset -> the oop in r0 is already correct.
 648   gen_cset_check(cgen->assembler(), r0, rscratch1, rscratch2, work);



 649   __ ret(lr);

 650 
 651   __ bind(work);
       // Fast path 2: already forwarded -> forwardee now in r0.
 652   gen_resolved_check(cgen->assembler(), r0, rscratch1, rscratch2, call_rt);
 653   __ ret(lr);

 654 
       // Slow path: evacuate via the runtime; r0 carries both argument
       // and result.
 655   __ bind(call_rt);
 656   __ enter(); // required for proper stackwalking of RuntimeStub frame
 657 
 658   __ push_call_clobbered_registers();
 659 
 660   __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier));
 661   __ blrt(lr, 1, 0, MacroAssembler::ret_type_integral);
       // Stash the returned oop across the register restore.
 662   __ mov(rscratch1, r0);
 663   __ pop_call_clobbered_registers();
 664   __ mov(r0, rscratch1);
 665 
 666   __ leave(); // required for proper stackwalking of RuntimeStub frame

 667   __ ret(lr);
 668 
 669   return start;
 670 }
 671 
 672 #undef __
 673 
 674 void ShenandoahBarrierSetAssembler::barrier_stubs_init() {
 675   if (ShenandoahLoadRefBarrier) {
 676     int stub_code_size = 2048;
 677     ResourceMark rm;
 678     BufferBlob* bb = BufferBlob::create("shenandoah_barrier_stubs", stub_code_size);
 679     CodeBuffer buf(bb);
 680     StubCodeGenerator cgen(&buf);
 681     _shenandoah_lrb = generate_shenandoah_lrb(&cgen);
 682   }
 683 }
< prev index next >