6 * published by the Free Software Foundation.
7 *
8 * This code is distributed in the hope that it will be useful, but WITHOUT
9 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
10 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
11 * version 2 for more details (a copy is included in the LICENSE file that
12 * accompanied this code).
13 *
14 * You should have received a copy of the GNU General Public License version
15 * 2 along with this work; if not, write to the Free Software Foundation,
16 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
17 *
18 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
19 * or visit www.oracle.com if you need additional information or have any
20 * questions.
21 *
22 */
23
24 #include "precompiled.hpp"
25 #include "asm/macroAssembler.inline.hpp"
26 #include "gc/g1/g1BarrierSet.hpp"
27 #include "gc/g1/g1CardTable.hpp"
28 #include "gc/g1/g1BarrierSetAssembler.hpp"
29 #include "gc/g1/g1ThreadLocalData.hpp"
30 #include "gc/g1/heapRegion.hpp"
31 #include "interpreter/interp_masm.hpp"
32 #include "runtime/sharedRuntime.hpp"
33 #include "utilities/macros.hpp"
34
35 #define __ masm->
36
37 void G1BarrierSetAssembler::gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators,
38 Register addr, Register count) {
39 bool dest_uninitialized = (decorators & AS_DEST_NOT_INITIALIZED) != 0;
  // With G1, don't generate the call if we statically know that the target is uninitialized
41 if (!dest_uninitialized) {
42 Register tmp = O5;
43 assert_different_registers(addr, count, tmp);
44 Label filtered;
45 // Is marking active?
46 if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {
47 __ ld(G2, in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset()), tmp);
|
6 * published by the Free Software Foundation.
7 *
8 * This code is distributed in the hope that it will be useful, but WITHOUT
9 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
10 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
11 * version 2 for more details (a copy is included in the LICENSE file that
12 * accompanied this code).
13 *
14 * You should have received a copy of the GNU General Public License version
15 * 2 along with this work; if not, write to the Free Software Foundation,
16 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
17 *
18 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
19 * or visit www.oracle.com if you need additional information or have any
20 * questions.
21 *
22 */
23
24 #include "precompiled.hpp"
25 #include "asm/macroAssembler.inline.hpp"
26 #include "c1/c1_LIRAssembler.hpp"
27 #include "c1/c1_MacroAssembler.hpp"
28 #include "gc/g1/c1/g1BarrierSetC1.hpp"
29 #include "gc/g1/g1BarrierSet.hpp"
30 #include "gc/g1/g1BarrierSetAssembler.hpp"
31 #include "gc/g1/g1CardTable.hpp"
32 #include "gc/g1/g1ThreadLocalData.hpp"
33 #include "gc/g1/heapRegion.hpp"
34 #include "interpreter/interp_masm.hpp"
35 #include "runtime/sharedRuntime.hpp"
36 #include "utilities/macros.hpp"
37
38 #define __ masm->
39
40 void G1BarrierSetAssembler::gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators,
41 Register addr, Register count) {
42 bool dest_uninitialized = (decorators & AS_DEST_NOT_INITIALIZED) != 0;
  // With G1, don't generate the call if we statically know that the target is uninitialized
44 if (!dest_uninitialized) {
45 Register tmp = O5;
46 assert_different_registers(addr, count, tmp);
47 Label filtered;
48 // Is marking active?
49 if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {
50 __ ld(G2, in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset()), tmp);
|
476 }
477
478 #undef __
479
480 void G1BarrierSetAssembler::barrier_stubs_init() {
481 if (dirty_card_log_enqueue == 0) {
482 G1BarrierSet* bs = barrier_set_cast<G1BarrierSet>(BarrierSet::barrier_set());
483 CardTable *ct = bs->card_table();
484 generate_dirty_card_log_enqueue(ct->byte_map_base());
485 assert(dirty_card_log_enqueue != 0, "postcondition.");
486 }
487 if (satb_log_enqueue_with_frame == 0) {
488 generate_satb_log_enqueue(true);
489 assert(satb_log_enqueue_with_frame != 0, "postcondition.");
490 }
491 if (satb_log_enqueue_frameless == 0) {
492 generate_satb_log_enqueue(false);
493 assert(satb_log_enqueue_frameless != 0, "postcondition.");
494 }
495 }
|
479 }
480
481 #undef __
482
483 void G1BarrierSetAssembler::barrier_stubs_init() {
484 if (dirty_card_log_enqueue == 0) {
485 G1BarrierSet* bs = barrier_set_cast<G1BarrierSet>(BarrierSet::barrier_set());
486 CardTable *ct = bs->card_table();
487 generate_dirty_card_log_enqueue(ct->byte_map_base());
488 assert(dirty_card_log_enqueue != 0, "postcondition.");
489 }
490 if (satb_log_enqueue_with_frame == 0) {
491 generate_satb_log_enqueue(true);
492 assert(satb_log_enqueue_with_frame != 0, "postcondition.");
493 }
494 if (satb_log_enqueue_frameless == 0) {
495 generate_satb_log_enqueue(false);
496 assert(satb_log_enqueue_frameless != 0, "postcondition.");
497 }
498 }
499
500 #define __ ce->masm()->
501
// Emit the out-of-line slow path for a C1 G1 pre-barrier stub:
// load the previous value if the stub requests it, skip the barrier
// when that value is null, otherwise call the shared pre-barrier
// runtime stub with the value passed in G4.
void G1BarrierSetAssembler::gen_g1_pre_barrier_stub(LIR_Assembler* ce, G1PreBarrierStub* stub) {
  G1BarrierSetC1* bs = (G1BarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
  // At this point we know that marking is in progress.
  // If do_load() is true then we have to emit the
  // load of the previous value; otherwise it has already
  // been loaded into _pre_val.

  __ bind(*stub->entry());

  assert(stub->pre_val()->is_register(), "Precondition.");
  Register pre_val_reg = stub->pre_val()->as_register();

  if (stub->do_load()) {
    ce->mem2reg(stub->addr(), stub->pre_val(), T_OBJECT, stub->patch_code(), stub->info(), false /*wide*/, false /*unaligned*/);
  }

  // A null previous value needs no SATB record: branch straight to the
  // continuation.  Use the single-instruction branch-on-null form only
  // when the displacement fits in 16 bits.
  if (__ is_in_wdisp16_range(*stub->continuation())) {
    __ br_null(pre_val_reg, /*annul*/false, Assembler::pt, *stub->continuation());
  } else {
    __ cmp(pre_val_reg, G0);
    __ brx(Assembler::equal, false, Assembler::pn, *stub->continuation());
  }
  __ delayed()->nop();  // branch delay slot

  // Call the pre-barrier runtime stub; the delay slot moves the
  // previous value into G4, the stub's argument register.
  __ call(bs->pre_barrier_c1_runtime_code_blob()->code_begin());
  __ delayed()->mov(pre_val_reg, G4);
  __ br(Assembler::always, false, Assembler::pt, *stub->continuation());
  __ delayed()->nop();  // branch delay slot
}
531
// Emit the out-of-line slow path for a C1 G1 post-barrier stub:
// skip the barrier when the stored value is null, otherwise call the
// shared post-barrier runtime stub with the field address in G4.
void G1BarrierSetAssembler::gen_g1_post_barrier_stub(LIR_Assembler* ce, G1PostBarrierStub* stub) {
  G1BarrierSetC1* bs = (G1BarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
  __ bind(*stub->entry());

  assert(stub->addr()->is_register(), "Precondition.");
  assert(stub->new_val()->is_register(), "Precondition.");
  Register addr_reg = stub->addr()->as_pointer_register();
  Register new_val_reg = stub->new_val()->as_register();

  // Storing null creates no cross-region reference: branch straight to
  // the continuation.  Use the single-instruction branch-on-null form
  // only when the displacement fits in 16 bits.
  if (__ is_in_wdisp16_range(*stub->continuation())) {
    __ br_null(new_val_reg, /*annul*/false, Assembler::pt, *stub->continuation());
  } else {
    __ cmp(new_val_reg, G0);
    __ brx(Assembler::equal, false, Assembler::pn, *stub->continuation());
  }
  __ delayed()->nop();  // branch delay slot

  // Call the post-barrier runtime stub; the delay slot moves the field
  // address into G4, the stub's argument register.
  __ call(bs->post_barrier_c1_runtime_code_blob()->code_begin());
  __ delayed()->mov(addr_reg, G4);
  __ br(Assembler::always, false, Assembler::pt, *stub->continuation());
  __ delayed()->nop();  // branch delay slot
}
554
555 #undef __
556 #define __ sasm->
557
// C1 runtime stub for the G1 pre-write (SATB) barrier slow path.
// Expects the previous value of the field in G4.  If marking is still
// active, pushes that value onto the thread-local SATB mark queue,
// calling into the VM to refill the queue buffer when it is full.
void G1BarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm) {
  __ prologue("g1_pre_barrier", false);

  // G4: previous value of memory

  Register pre_val = G4;
  Register tmp = G1_scratch;
  Register tmp2 = G3_scratch;

  Label refill, restart;
  // Byte offsets of the SATB queue fields within the thread-local data.
  int satb_q_active_byte_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset());
  int satb_q_index_byte_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_index_offset());
  int satb_q_buf_byte_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_buffer_offset());

  // Is marking still active?  Load with the width that matches the
  // declared width of the queue's "active" flag.
  if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {
    __ ld(G2_thread, satb_q_active_byte_offset, tmp);
  } else {
    assert(in_bytes(SATBMarkQueue::byte_width_of_active()) == 1, "Assumption");
    __ ldsb(G2_thread, satb_q_active_byte_offset, tmp);
  }
  // Marking inactive (flag == 0): nothing to record, return.
  __ cmp_and_br_short(tmp, G0, Assembler::notEqual, Assembler::pt, restart);
  __ retl();
  __ delayed()->nop();

  __ bind(restart);
  // Load the index into the SATB buffer. SATBMarkQueue::_index is a
  // size_t so ld_ptr is appropriate
  __ ld_ptr(G2_thread, satb_q_index_byte_offset, tmp);

  // index == 0?  The buffer is full (it fills toward index 0), refill.
  __ cmp_and_brx_short(tmp, G0, Assembler::equal, Assembler::pn, refill);

  __ ld_ptr(G2_thread, satb_q_buf_byte_offset, tmp2);
  __ sub(tmp, oopSize, tmp);  // step the index down by one slot

  __ st_ptr(pre_val, tmp2, tmp); // [_buf + index] := <previous value>
  // Use return-from-leaf; the delay slot stores the new index back.
  __ retl();
  __ delayed()->st_ptr(tmp, G2_thread, satb_q_index_byte_offset);

  __ bind(refill);

  // Slow path: hand the full buffer to the VM and obtain a fresh one.
  __ save_live_registers_no_oop_map(true);

  __ call_VM_leaf(L7_thread_cache,
                  CAST_FROM_FN_PTR(address,
                                   SATBMarkQueueSet::handle_zero_index_for_thread),
                  G2_thread);

  __ restore_live_registers(true);

  // Retry the enqueue now that the buffer has room.
  __ br(Assembler::always, /*annul*/false, Assembler::pt, restart);
  __ epilogue();
}
613
// C1 runtime stub for the G1 post-write barrier slow path.
// Expects the address of the updated field in G4.  Computes the card
// for that address; if the card is neither young nor already dirty,
// dirties it and enqueues its address on the thread-local dirty-card
// queue, calling into the VM to refill the queue buffer when full.
void G1BarrierSetAssembler::generate_c1_post_barrier_runtime_stub(StubAssembler* sasm) {
  __ prologue("g1_post_barrier", false);

  G1BarrierSet* bs = barrier_set_cast<G1BarrierSet>(BarrierSet::barrier_set());

  Register addr = G4;
  Register cardtable = G5;
  Register tmp = G1_scratch;
  Register tmp2 = G3_scratch;
  jbyte* byte_map_base = bs->card_table()->byte_map_base();

  Label not_already_dirty, restart, refill, young_card;

  // Convert the field address into a card index.
#ifdef _LP64
  __ srlx(addr, CardTable::card_shift, addr);
#else
  __ srl(addr, CardTable::card_shift, addr);
#endif

  AddressLiteral rs((address)byte_map_base);
  __ set(rs, cardtable);         // cardtable := <card table base>
  __ ldub(addr, cardtable, tmp); // tmp := [addr + cardtable]

  // A young card needs no work: branch to the shared return below.
  __ cmp_and_br_short(tmp, G1CardTable::g1_young_card_val(), Assembler::equal, Assembler::pt, young_card);

  // StoreLoad fence, then re-read the card value so we observe any
  // concurrent change made since the first load.
  __ membar(Assembler::Membar_mask_bits(Assembler::StoreLoad));
  __ ldub(addr, cardtable, tmp); // tmp := [addr + cardtable]

  assert(G1CardTable::dirty_card_val() == 0, "otherwise check this code");
  __ cmp_and_br_short(tmp, G0, Assembler::notEqual, Assembler::pt, not_already_dirty);

  __ bind(young_card);
  // We didn't take the branch, so we're already dirty: return.
  // Use return-from-leaf
  __ retl();
  __ delayed()->nop();

  // Not dirty.
  __ bind(not_already_dirty);

  // Get cardtable + tmp into a reg by itself (the card's address).
  __ add(addr, cardtable, tmp2);

  // First, dirty it.
  __ stb(G0, tmp2, 0); // [cardPtr] := 0 (i.e., dirty).

  // Reuse the now-free registers for the enqueue below.
  Register tmp3 = cardtable;
  Register tmp4 = tmp;

  // these registers are now dead
  addr = cardtable = tmp = noreg;

  // Byte offsets of the dirty-card queue fields in the thread-local data.
  int dirty_card_q_index_byte_offset = in_bytes(G1ThreadLocalData::dirty_card_queue_index_offset());
  int dirty_card_q_buf_byte_offset = in_bytes(G1ThreadLocalData::dirty_card_queue_buffer_offset());

  __ bind(restart);

  // Get the index into the update buffer. DirtyCardQueue::_index is
  // a size_t so ld_ptr is appropriate here.
  __ ld_ptr(G2_thread, dirty_card_q_index_byte_offset, tmp3);

  // index == 0?  The buffer is full (it fills toward index 0), refill.
  __ cmp_and_brx_short(tmp3, G0, Assembler::equal, Assembler::pn, refill);

  __ ld_ptr(G2_thread, dirty_card_q_buf_byte_offset, tmp4);
  __ sub(tmp3, oopSize, tmp3);  // step the index down by one slot

  __ st_ptr(tmp2, tmp4, tmp3); // [_buf + index] := <address_of_card>
  // Use return-from-leaf; the delay slot stores the new index back.
  __ retl();
  __ delayed()->st_ptr(tmp3, G2_thread, dirty_card_q_index_byte_offset);

  __ bind(refill);

  // Slow path: hand the full buffer to the VM and obtain a fresh one.
  __ save_live_registers_no_oop_map(true);

  __ call_VM_leaf(L7_thread_cache,
                  CAST_FROM_FN_PTR(address,
                                   DirtyCardQueueSet::handle_zero_index_for_thread),
                  G2_thread);

  __ restore_live_registers(true);

  // Retry the enqueue now that the buffer has room.
  __ br(Assembler::always, /*annul*/false, Assembler::pt, restart);
  __ epilogue();
}
700
701 #undef __
|