554 __ reg2mem_opt(Z_R1_scratch, dest_addr, true);
555 }
556 return;
557
558 case T_LONG: // fall through
559 case T_DOUBLE:
560 dest_addr = frame_map()->address_for_slot(dest->double_stack_ix());
561 lmem = 8; lcon = 8; cbits = (int64_t)(c->as_jlong_bits());
562 break;
563
564 default:
565 ShouldNotReachHere();
566 }
567
568 __ store_const(dest_addr, cbits, lmem, lcon);
569 }
570
571 void LIR_Assembler::const2mem(LIR_Opr src, LIR_Opr dest, BasicType type, CodeEmitInfo* info, bool wide) {
572 assert(src->is_constant(), "should not call otherwise");
573 assert(dest->is_address(), "should not call otherwise");
574 // See special case in LIRGenerator::do_StoreIndexed.
575 // T_BYTE: Special case for card mark store.
576 assert(type == T_BYTE || !dest->as_address_ptr()->index()->is_valid(), "not supported");
577 LIR_Const* c = src->as_constant_ptr();
578 Address addr = as_Address(dest->as_address_ptr());
579
580 int store_offset = -1;
581 unsigned int lmem = 0;
582 unsigned int lcon = 0;
583 int64_t cbits = 0;
584 switch (type) {
585 case T_INT: // fall through
586 case T_FLOAT:
587 lmem = 4; lcon = 4; cbits = c->as_jint_bits();
588 break;
589
590 case T_ADDRESS:
591 lmem = 8; lcon = 4; cbits = c->as_jint_bits();
592 break;
593
594 case T_OBJECT: // fall through
595 case T_ARRAY:
596 if (c->as_jobject() == NULL) {
597 if (UseCompressedOops && !wide) {
598 store_offset = __ store_const(addr, (int32_t)NULL_WORD, 4, 4);
599 } else {
600 store_offset = __ store_const(addr, (int64_t)NULL_WORD, 8, 8);
601 }
602 } else {
603 jobject2reg(c->as_jobject(), Z_R1_scratch);
604 if (UseCompressedOops && !wide) {
605 __ encode_heap_oop(Z_R1_scratch);
606 store_offset = __ reg2mem_opt(Z_R1_scratch, addr, false);
607 } else {
608 store_offset = __ reg2mem_opt(Z_R1_scratch, addr, true);
609 }
610 }
611 assert(store_offset >= 0, "check");
612 break;
613
614 case T_LONG: // fall through
615 case T_DOUBLE:
616 lmem = 8; lcon = 8; cbits = (int64_t)(c->as_jlong_bits());
617 break;
618
619 case T_BOOLEAN: // fall through
620 case T_BYTE:
621 lmem = 1; lcon = 1; cbits = (int8_t)(c->as_jint());
622 break;
623
624 case T_CHAR: // fall through
625 case T_SHORT:
626 lmem = 2; lcon = 2; cbits = (int16_t)(c->as_jint());
627 break;
628
629 default:
630 ShouldNotReachHere();
631 };
632
633 // Index register is normally not supported, but for
634 // LIRGenerator::CardTableBarrierSet_post_barrier we make an exception.
635 if (type == T_BYTE && dest->as_address_ptr()->index()->is_valid()) {
636 __ load_const_optimized(Z_R0_scratch, (int8_t)(c->as_jint()));
637 store_offset = __ offset();
638 if (Immediate::is_uimm12(addr.disp())) {
639 __ z_stc(Z_R0_scratch, addr);
640 } else {
641 __ z_stcy(Z_R0_scratch, addr);
642 }
643 }
644
645 if (store_offset == -1) {
646 store_offset = __ store_const(addr, cbits, lmem, lcon);
647 assert(store_offset >= 0, "check");
648 }
649
650 if (info != NULL) {
651 add_debug_info_for_null_check(store_offset, info);
652 }
653 }
654
655 void LIR_Assembler::const2reg(LIR_Opr src, LIR_Opr dest, LIR_PatchCode patch_code, CodeEmitInfo* info) {
656 assert(src->is_constant(), "should not call otherwise");
657 assert(dest->is_register(), "should not call otherwise");
658 LIR_Const* c = src->as_constant_ptr();
659
660 switch (c->type()) {
661 case T_INT: {
662 assert(patch_code == lir_patch_none, "no patching handled here");
663 __ load_const_optimized(dest->as_register(), c->as_jint());
664 break;
665 }
666
|
554 __ reg2mem_opt(Z_R1_scratch, dest_addr, true);
555 }
556 return;
557
558 case T_LONG: // fall through
559 case T_DOUBLE:
560 dest_addr = frame_map()->address_for_slot(dest->double_stack_ix());
561 lmem = 8; lcon = 8; cbits = (int64_t)(c->as_jlong_bits());
562 break;
563
564 default:
565 ShouldNotReachHere();
566 }
567
568 __ store_const(dest_addr, cbits, lmem, lcon);
569 }
570
// Store a constant (src) to memory (dest).
// Two strategies, selected by the address form:
//  - with a valid index register, s390 immediate-to-memory stores cannot be
//    used, so the constant is materialized in a scratch register and stored
//    from there;
//  - without an index register, the constant is stored directly via
//    store_const where possible.
//  src  - constant operand to store.
//  dest - address operand (base, optional index, displacement).
//  info - if non-NULL, debug info for an implicit null check is recorded
//         at the code offset of the emitted store instruction.
//  wide - if true, force a full-width (uncompressed) oop store even when
//         UseCompressedOops is enabled.
void LIR_Assembler::const2mem(LIR_Opr src, LIR_Opr dest, BasicType type, CodeEmitInfo* info, bool wide) {
  assert(src->is_constant(), "should not call otherwise");
  assert(dest->is_address(), "should not call otherwise");

  LIR_Const* c = src->as_constant_ptr();
  Address addr = as_Address(dest->as_address_ptr());

  int store_offset = -1;  // Code offset of the store; stays -1 until one is emitted.

  if (dest->as_address_ptr()->index()->is_valid()) {
    // Indexed address: go through a scratch register for every type.
    // Where the store is a raw z_* instruction, __ offset() is captured
    // immediately before it so the debug info below points at the store
    // itself, not at the preceding constant load.
    switch (type) {
      case T_INT: // fall through
      case T_FLOAT:
        __ load_const_optimized(Z_R0_scratch, c->as_jint_bits());
        store_offset = __ offset();
        if (Immediate::is_uimm12(addr.disp())) {
          __ z_st(Z_R0_scratch, addr);   // short form: 12-bit unsigned displacement
        } else {
          __ z_sty(Z_R0_scratch, addr);  // long form otherwise
        }
        break;

      case T_ADDRESS:
        __ load_const_optimized(Z_R1_scratch, c->as_jint_bits());
        store_offset = __ reg2mem_opt(Z_R1_scratch, addr, true);
        break;

      case T_OBJECT: // fall through
      case T_ARRAY:
        if (c->as_jobject() == NULL) {
          // Null: clear a scratch register (narrow or wide) and store it.
          if (UseCompressedOops && !wide) {
            __ clear_reg(Z_R1_scratch, false);
            store_offset = __ reg2mem_opt(Z_R1_scratch, addr, false);
          } else {
            __ clear_reg(Z_R1_scratch, true);
            store_offset = __ reg2mem_opt(Z_R1_scratch, addr, true);
          }
        } else {
          // Non-null oop constants go through a scratch register,
          // compressed first if a narrow store is requested.
          jobject2reg(c->as_jobject(), Z_R1_scratch);
          if (UseCompressedOops && !wide) {
            __ encode_heap_oop(Z_R1_scratch);
            store_offset = __ reg2mem_opt(Z_R1_scratch, addr, false);
          } else {
            store_offset = __ reg2mem_opt(Z_R1_scratch, addr, true);
          }
        }
        assert(store_offset >= 0, "check");
        break;

      case T_LONG: // fall through
      case T_DOUBLE:
        __ load_const_optimized(Z_R1_scratch, (int64_t)(c->as_jlong_bits()));
        store_offset = __ reg2mem_opt(Z_R1_scratch, addr, true);
        break;

      case T_BOOLEAN: // fall through
      case T_BYTE:
        __ load_const_optimized(Z_R0_scratch, (int8_t)(c->as_jint()));
        store_offset = __ offset();
        if (Immediate::is_uimm12(addr.disp())) {
          __ z_stc(Z_R0_scratch, addr);   // byte store, short form
        } else {
          __ z_stcy(Z_R0_scratch, addr);  // byte store, long form
        }
        break;

      case T_CHAR: // fall through
      case T_SHORT:
        __ load_const_optimized(Z_R0_scratch, (int16_t)(c->as_jint()));
        store_offset = __ offset();
        if (Immediate::is_uimm12(addr.disp())) {
          __ z_sth(Z_R0_scratch, addr);   // halfword store, short form
        } else {
          __ z_sthy(Z_R0_scratch, addr);  // halfword store, long form
        }
        break;

      default:
        ShouldNotReachHere();
    }

  } else { // no index

    unsigned int lmem = 0;  // Width of the memory slot in bytes (passed to store_const).
    unsigned int lcon = 0;  // Width of the constant encoding in bytes (passed to store_const).
    int64_t cbits = 0;      // Constant bit pattern to store.

    switch (type) {
      case T_INT: // fall through
      case T_FLOAT:
        lmem = 4; lcon = 4; cbits = c->as_jint_bits();
        break;

      case T_ADDRESS:
        // 8-byte memory slot, but the constant itself fits in 4 bytes.
        lmem = 8; lcon = 4; cbits = c->as_jint_bits();
        break;

      case T_OBJECT: // fall through
      case T_ARRAY:
        if (c->as_jobject() == NULL) {
          // Null is stored as an immediate: 4 bytes when compressed, else 8.
          if (UseCompressedOops && !wide) {
            store_offset = __ store_const(addr, (int32_t)NULL_WORD, 4, 4);
          } else {
            store_offset = __ store_const(addr, (int64_t)NULL_WORD, 8, 8);
          }
        } else {
          // Non-null oop constants go through a scratch register,
          // compressed first if a narrow store is requested.
          jobject2reg(c->as_jobject(), Z_R1_scratch);
          if (UseCompressedOops && !wide) {
            __ encode_heap_oop(Z_R1_scratch);
            store_offset = __ reg2mem_opt(Z_R1_scratch, addr, false);
          } else {
            store_offset = __ reg2mem_opt(Z_R1_scratch, addr, true);
          }
        }
        assert(store_offset >= 0, "check");
        break;

      case T_LONG: // fall through
      case T_DOUBLE:
        lmem = 8; lcon = 8; cbits = (int64_t)(c->as_jlong_bits());
        break;

      case T_BOOLEAN: // fall through
      case T_BYTE:
        lmem = 1; lcon = 1; cbits = (int8_t)(c->as_jint());
        break;

      case T_CHAR: // fall through
      case T_SHORT:
        lmem = 2; lcon = 2; cbits = (int16_t)(c->as_jint());
        break;

      default:
        ShouldNotReachHere();
    }

    if (store_offset == -1) {
      // Not yet emitted above: store the constant bits directly to memory.
      store_offset = __ store_const(addr, cbits, lmem, lcon);
      assert(store_offset >= 0, "check");
    }
  }

  if (info != NULL) {
    add_debug_info_for_null_check(store_offset, info);
  }
}
717
718 void LIR_Assembler::const2reg(LIR_Opr src, LIR_Opr dest, LIR_PatchCode patch_code, CodeEmitInfo* info) {
719 assert(src->is_constant(), "should not call otherwise");
720 assert(dest->is_register(), "should not call otherwise");
721 LIR_Const* c = src->as_constant_ptr();
722
723 switch (c->type()) {
724 case T_INT: {
725 assert(patch_code == lir_patch_none, "no patching handled here");
726 __ load_const_optimized(dest->as_register(), c->as_jint());
727 break;
728 }
729
|