743 assert(r1 != array, "different registers");
744 __ mov(r1, index);
745 }
746 Label ok;
747 __ br(Assembler::LO, ok);
748 // ??? convention: move array into r3 for exception message
749 __ mov(r3, array);
750 __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
751 __ br(rscratch1);
752 __ bind(ok);
753 }
754
// iaload bytecode: push a[i] (32-bit int).  On entry tos (r0) holds the
// index; the array oop is popped from the expression stack.
755 void TemplateTable::iaload()
756 {
757 transition(itos, itos);
758 __ mov(r1, r0);
759 __ pop_ptr(r0);
760 // r0: array
761 // r1: index
762 index_check(r0, r1); // leaves index in r1, kills rscratch1
763 __ lea(r1, Address(r0, r1, Address::uxtw(2))); // r1 = array + index*4 (index zero-extended from 32 bits)
764 __ ldrw(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_INT)));
765 }
766
// laload bytecode: push a[i] (64-bit long).  Same shape as iaload but
// with an 8-byte element scale and a full-width load.
767 void TemplateTable::laload()
768 {
769 transition(itos, ltos);
770 __ mov(r1, r0);
771 __ pop_ptr(r0);
772 // r0: array
773 // r1: index
774 index_check(r0, r1); // leaves index in r1, kills rscratch1
775 __ lea(r1, Address(r0, r1, Address::uxtw(3))); // r1 = array + index*8
776 __ ldr(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_LONG)));
777 }
778
// faload bytecode: push a[i] (float).  Result is left in v0 (ftos).
779 void TemplateTable::faload()
780 {
781 transition(itos, ftos);
782 __ mov(r1, r0);
783 __ pop_ptr(r0);
784 // r0: array
785 // r1: index
786 index_check(r0, r1); // leaves index in r1, kills rscratch1
787 __ lea(r1, Address(r0, r1, Address::uxtw(2))); // r1 = array + index*4
788 __ ldrs(v0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
789 }
790
// daload bytecode: push a[i] (double).  Result is left in v0 (dtos).
791 void TemplateTable::daload()
792 {
793 transition(itos, dtos);
794 __ mov(r1, r0);
795 __ pop_ptr(r0);
796 // r0: array
797 // r1: index
798 index_check(r0, r1); // leaves index in r1, kills rscratch1
799 __ lea(r1, Address(r0, r1, Address::uxtw(3))); // r1 = array + index*8
800 __ ldrd(v0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
801 }
802
// aaload bytecode: push a[i] (object reference).  The load goes through
// do_oop_load so the GC load barrier / oop decoding is applied.
803 void TemplateTable::aaload()
804 {
805 transition(itos, atos);
806 __ mov(r1, r0);
807 __ pop_ptr(r0);
808 // r0: array
809 // r1: index
810 index_check(r0, r1); // leaves index in r1, kills rscratch1
811 int s = (UseCompressedOops ? 2 : 3); // log2(element size): 4-byte narrow oop vs 8-byte oop
812 __ lea(r1, Address(r0, r1, Address::uxtw(s)));
813 do_oop_load(_masm,
814 Address(r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
815 r0,
816 IN_HEAP | IN_HEAP_ARRAY);
817 }
818
// baload bytecode: push a[i] (byte/boolean element), sign-extended to int.
819 void TemplateTable::baload()
820 {
821 transition(itos, itos);
822 __ mov(r1, r0);
823 __ pop_ptr(r0);
824 // r0: array
825 // r1: index
826 index_check(r0, r1); // leaves index in r1, kills rscratch1
827 __ lea(r1, Address(r0, r1, Address::uxtw(0))); // byte elements: no scaling
828 __ load_signed_byte(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
829 }
830
// caload bytecode: push a[i] (char), zero-extended 16-bit load.
831 void TemplateTable::caload()
832 {
833 transition(itos, itos);
834 __ mov(r1, r0);
835 __ pop_ptr(r0);
836 // r0: array
837 // r1: index
838 index_check(r0, r1); // leaves index in r1, kills rscratch1
839 __ lea(r1, Address(r0, r1, Address::uxtw(1))); // r1 = array + index*2
840 __ load_unsigned_short(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
841 }
842
843 // iload followed by caload frequent pair
// Fused template for the frequent iload+caload pair: the index is read
// directly from the local variable (vtos entry), skipping the intermediate
// push/pop of the index that separate templates would do.
844 void TemplateTable::fast_icaload()
845 {
846 transition(vtos, itos);
847 // load index out of locals
848 locals_index(r2);
849 __ ldr(r1, iaddress(r2));
850 
851 __ pop_ptr(r0);
852 
853 // r0: array
854 // r1: index
855 index_check(r0, r1); // leaves index in r1, kills rscratch1
856 __ lea(r1, Address(r0, r1, Address::uxtw(1))); // r1 = array + index*2
857 __ load_unsigned_short(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
858 }
859
// saload bytecode: push a[i] (short), sign-extended 16-bit load
// (contrast with caload's zero-extended load for char).
860 void TemplateTable::saload()
861 {
862 transition(itos, itos);
863 __ mov(r1, r0);
864 __ pop_ptr(r0);
865 // r0: array
866 // r1: index
867 index_check(r0, r1); // leaves index in r1, kills rscratch1
868 __ lea(r1, Address(r0, r1, Address::uxtw(1))); // r1 = array + index*2
869 __ load_signed_short(r0, Address(r1, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
870 }
871
// iload_<n> bytecodes: push the int held in local slot n.
872 void TemplateTable::iload(int n)
873 {
874 transition(vtos, itos);
875 __ ldr(r0, iaddress(n));
876 }
877
// lload_<n> bytecodes: push the long held in local slot pair n.
878 void TemplateTable::lload(int n)
879 {
880 transition(vtos, ltos);
881 __ ldr(r0, laddress(n));
882 }
883
// fload_<n> bytecodes: push the float held in local slot n (into v0).
884 void TemplateTable::fload(int n)
885 {
886 transition(vtos, ftos);
887 __ ldrs(v0, faddress(n));
888 }
889
1042 __ pop_d();
1043 locals_index_wide(r1);
1044 __ strd(v0, daddress(r1, rscratch1, _masm));
1045 }
1046
// wide astore: pop a reference and store it into the local addressed by
// a 16-bit (wide) local index.
1047 void TemplateTable::wide_astore() {
1048 transition(vtos, vtos);
1049 __ pop_ptr(r0);
1050 locals_index_wide(r1);
1051 __ str(r0, aaddress(r1));
1052 }
1053
// iastore bytecode: a[i] = value (int).  Value arrives in r0 (itos);
// index and array ref are popped from the stack.
1054 void TemplateTable::iastore() {
1055 transition(itos, vtos);
1056 __ pop_i(r1);
1057 __ pop_ptr(r3);
1058 // r0: value
1059 // r1: index
1060 // r3: array
1061 index_check(r3, r1); // prefer index in r1
1062 __ lea(rscratch1, Address(r3, r1, Address::uxtw(2))); // rscratch1 = array + index*4
1063 __ strw(r0, Address(rscratch1,
1064 arrayOopDesc::base_offset_in_bytes(T_INT)));
1065 }
1066
// lastore bytecode: a[i] = value (long), 8-byte element scale.
1067 void TemplateTable::lastore() {
1068 transition(ltos, vtos);
1069 __ pop_i(r1);
1070 __ pop_ptr(r3);
1071 // r0: value
1072 // r1: index
1073 // r3: array
1074 index_check(r3, r1); // prefer index in r1
1075 __ lea(rscratch1, Address(r3, r1, Address::uxtw(3))); // rscratch1 = array + index*8
1076 __ str(r0, Address(rscratch1,
1077 arrayOopDesc::base_offset_in_bytes(T_LONG)));
1078 }
1079
// fastore bytecode: a[i] = value (float); value arrives in v0 (ftos).
1080 void TemplateTable::fastore() {
1081 transition(ftos, vtos);
1082 __ pop_i(r1);
1083 __ pop_ptr(r3);
1084 // v0: value
1085 // r1: index
1086 // r3: array
1087 index_check(r3, r1); // prefer index in r1
1088 __ lea(rscratch1, Address(r3, r1, Address::uxtw(2))); // rscratch1 = array + index*4
1089 __ strs(v0, Address(rscratch1,
1090 arrayOopDesc::base_offset_in_bytes(T_FLOAT)));
1091 }
1092
// dastore bytecode: a[i] = value (double); value arrives in v0 (dtos).
1093 void TemplateTable::dastore() {
1094 transition(dtos, vtos);
1095 __ pop_i(r1);
1096 __ pop_ptr(r3);
1097 // v0: value
1098 // r1: index
1099 // r3: array
1100 index_check(r3, r1); // prefer index in r1
1101 __ lea(rscratch1, Address(r3, r1, Address::uxtw(3))); // rscratch1 = array + index*8
1102 __ strd(v0, Address(rscratch1,
1103 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)));
1104 }
1105
1106 void TemplateTable::aastore() {
1107 Label is_null, ok_is_subtype, done;
1108 transition(vtos, vtos);
1109 // stack: ..., array, index, value
1110 __ ldr(r0, at_tos()); // value
1111 __ ldr(r2, at_tos_p1()); // index
1112 __ ldr(r3, at_tos_p2()); // array
1113
1114 Address element_address(r4, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1115
1116 index_check(r3, r2); // kills r1
1117 __ lea(r4, Address(r3, r2, Address::uxtw(UseCompressedOops? 2 : 3)));
1118
1119 // do array store check - check for NULL value first
1120 __ cbz(r0, is_null);
1121
1122 // Move subklass into r1
1123 __ load_klass(r1, r0);
1124 // Move superklass into r0
1125 __ load_klass(r0, r3);
1126 __ ldr(r0, Address(r0,
1127 ObjArrayKlass::element_klass_offset()));
1128 // Compress array + index*oopSize + 12 into a single register. Frees r2.
1129
1130 // Generate subtype check. Blows r2, r5
1131 // Superklass in r0. Subklass in r1.
1132 __ gen_subtype_check(r1, ok_is_subtype);
1133
1134 // Come here on failure
1135 // object is at TOS
1136 __ b(Interpreter::_throw_ArrayStoreException_entry);
1137
// bastore bytecode: a[i] = value (byte).  boolean[] and byte[] share this
// bytecode, so the array's layout-helper diff bit is tested to decide
// whether the value must be masked down to 0/1 before storing.
1159 void TemplateTable::bastore()
1160 {
1161 transition(itos, vtos);
1162 __ pop_i(r1);
1163 __ pop_ptr(r3);
1164 // r0: value
1165 // r1: index
1166 // r3: array
1167 index_check(r3, r1); // prefer index in r1
1168 
1169 // Need to check whether array is boolean or byte
1170 // since both types share the bastore bytecode.
1171 __ load_klass(r2, r3);
1172 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
1173 int diffbit_index = exact_log2(Klass::layout_helper_boolean_diffbit());
1174 Label L_skip;
1175 __ tbz(r2, diffbit_index, L_skip); // bit clear => byte[] => no masking
1176 __ andw(r0, r0, 1); // if it is a T_BOOLEAN array, mask the stored value to 0/1
1177 __ bind(L_skip);
1178 
1179 __ lea(rscratch1, Address(r3, r1, Address::uxtw(0))); // byte elements: no scaling
1180 __ strb(r0, Address(rscratch1,
1181 arrayOopDesc::base_offset_in_bytes(T_BYTE)));
1182 }
1183
// castore bytecode: a[i] = value (char), 16-bit store.
1184 void TemplateTable::castore()
1185 {
1186 transition(itos, vtos);
1187 __ pop_i(r1);
1188 __ pop_ptr(r3);
1189 // r0: value
1190 // r1: index
1191 // r3: array
1192 index_check(r3, r1); // prefer index in r1
1193 __ lea(rscratch1, Address(r3, r1, Address::uxtw(1))); // rscratch1 = array + index*2
1194 __ strh(r0, Address(rscratch1,
1195 arrayOopDesc::base_offset_in_bytes(T_CHAR)));
1196 }
1197
// sastore bytecode: identical code to castore — both store the low
// 16 bits of the value (strh), only the element type name differs.
1198 void TemplateTable::sastore()
1199 {
1200 castore();
1201 }
1202
// istore_<n> bytecodes: pop tos int into local slot n.
1203 void TemplateTable::istore(int n)
1204 {
1205 transition(itos, vtos);
1206 __ str(r0, iaddress(n));
1207 }
1208
// lstore_<n> bytecodes: pop tos long into local slot pair n.
1209 void TemplateTable::lstore(int n)
1210 {
1211 transition(ltos, vtos);
1212 __ str(r0, laddress(n));
1213 }
1214
1215 void TemplateTable::fstore(int n)
2496 __ bind(notVolatile);
2497 }
2498
2499 const Address field(obj, off);
2500
2501 Label Done, notByte, notBool, notInt, notShort, notChar,
2502 notLong, notFloat, notObj, notDouble;
2503
2504 // x86 uses a shift and mask or wings it with a shift plus assert
2505 // the mask is not needed. aarch64 just uses bitfield extract
2506 __ ubfxw(flags, raw_flags, ConstantPoolCacheEntry::tos_state_shift,
2507 ConstantPoolCacheEntry::tos_state_bits);
2508
2509 assert(btos == 0, "change code, btos != 0");
2510 __ cbnz(flags, notByte);
2511
2512 // Don't rewrite getstatic, only getfield
2513 if (is_static) rc = may_not_rewrite;
2514
2515 // btos
2516 __ load_signed_byte(r0, field);
2517 __ push(btos);
2518 // Rewrite bytecode to be faster
2519 if (rc == may_rewrite) {
2520 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2521 }
2522 __ b(Done);
2523
2524 __ bind(notByte);
2525 __ cmp(flags, ztos);
2526 __ br(Assembler::NE, notBool);
2527
2528 // ztos (same code as btos)
2529 __ ldrsb(r0, field);
2530 __ push(ztos);
2531 // Rewrite bytecode to be faster
2532 if (rc == may_rewrite) {
2533 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2534 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2535 }
2536 __ b(Done);
2537
2538 __ bind(notBool);
2539 __ cmp(flags, atos);
2540 __ br(Assembler::NE, notObj);
2541 // atos
2542 do_oop_load(_masm, field, r0, IN_HEAP);
2543 __ push(atos);
2544 if (rc == may_rewrite) {
2545 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2546 }
2547 __ b(Done);
2548
2549 __ bind(notObj);
2550 __ cmp(flags, itos);
2551 __ br(Assembler::NE, notInt);
2552 // itos
2553 __ ldrw(r0, field);
2554 __ push(itos);
2555 // Rewrite bytecode to be faster
2556 if (rc == may_rewrite) {
2557 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2558 }
2559 __ b(Done);
2560
2561 __ bind(notInt);
2562 __ cmp(flags, ctos);
2563 __ br(Assembler::NE, notChar);
2564 // ctos
2565 __ load_unsigned_short(r0, field);
2566 __ push(ctos);
2567 // Rewrite bytecode to be faster
2568 if (rc == may_rewrite) {
2569 patch_bytecode(Bytecodes::_fast_cgetfield, bc, r1);
2570 }
2571 __ b(Done);
2572
2573 __ bind(notChar);
2574 __ cmp(flags, stos);
2575 __ br(Assembler::NE, notShort);
2576 // stos
2577 __ load_signed_short(r0, field);
2578 __ push(stos);
2579 // Rewrite bytecode to be faster
2580 if (rc == may_rewrite) {
2581 patch_bytecode(Bytecodes::_fast_sgetfield, bc, r1);
2582 }
2583 __ b(Done);
2584
2585 __ bind(notShort);
2586 __ cmp(flags, ltos);
2587 __ br(Assembler::NE, notLong);
2588 // ltos
2589 __ ldr(r0, field);
2590 __ push(ltos);
2591 // Rewrite bytecode to be faster
2592 if (rc == may_rewrite) {
2593 patch_bytecode(Bytecodes::_fast_lgetfield, bc, r1);
2594 }
2595 __ b(Done);
2596
2597 __ bind(notLong);
2598 __ cmp(flags, ftos);
2599 __ br(Assembler::NE, notFloat);
2600 // ftos
2601 __ ldrs(v0, field);
2602 __ push(ftos);
2603 // Rewrite bytecode to be faster
2604 if (rc == may_rewrite) {
2605 patch_bytecode(Bytecodes::_fast_fgetfield, bc, r1);
2606 }
2607 __ b(Done);
2608
2609 __ bind(notFloat);
2610 #ifdef ASSERT
2611 __ cmp(flags, dtos);
2612 __ br(Assembler::NE, notDouble);
2613 #endif
2614 // dtos
2615 __ ldrd(v0, field);
2616 __ push(dtos);
2617 // Rewrite bytecode to be faster
2618 if (rc == may_rewrite) {
2619 patch_bytecode(Bytecodes::_fast_dgetfield, bc, r1);
2620 }
2621 #ifdef ASSERT
2622 __ b(Done);
2623
2624 __ bind(notDouble);
2625 __ stop("Bad state");
2626 #endif
2627
2628 __ bind(Done);
2629
2630 Label notVolatile;
2631 __ tbz(raw_flags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2632 __ membar(MacroAssembler::LoadLoad | MacroAssembler::LoadStore);
2633 __ bind(notVolatile);
2634 }
2635
2733 // field address
2734 const Address field(obj, off);
2735
2736 Label notByte, notBool, notInt, notShort, notChar,
2737 notLong, notFloat, notObj, notDouble;
2738
2739 // x86 uses a shift and mask or wings it with a shift plus assert
2740 // the mask is not needed. aarch64 just uses bitfield extract
2741 __ ubfxw(flags, flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
2742
2743 assert(btos == 0, "change code, btos != 0");
2744 __ cbnz(flags, notByte);
2745
2746 // Don't rewrite putstatic, only putfield
2747 if (is_static) rc = may_not_rewrite;
2748
2749 // btos
2750 {
2751 __ pop(btos);
2752 if (!is_static) pop_and_check_object(obj);
2753 __ strb(r0, field);
2754 if (rc == may_rewrite) {
2755 patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no);
2756 }
2757 __ b(Done);
2758 }
2759
2760 __ bind(notByte);
2761 __ cmp(flags, ztos);
2762 __ br(Assembler::NE, notBool);
2763
2764 // ztos
2765 {
2766 __ pop(ztos);
2767 if (!is_static) pop_and_check_object(obj);
2768 __ andw(r0, r0, 0x1);
2769 __ strb(r0, field);
2770 if (rc == may_rewrite) {
2771 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2772 }
2773 __ b(Done);
2774 }
2775
2776 __ bind(notBool);
2777 __ cmp(flags, atos);
2778 __ br(Assembler::NE, notObj);
2779
2780 // atos
2781 {
2782 __ pop(atos);
2783 if (!is_static) pop_and_check_object(obj);
2784 // Store into the field
2785 do_oop_store(_masm, field, r0, IN_HEAP);
2786 if (rc == may_rewrite) {
2787 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2788 }
2789 __ b(Done);
2790 }
2791
2792 __ bind(notObj);
2793 __ cmp(flags, itos);
2794 __ br(Assembler::NE, notInt);
2795
2796 // itos
2797 {
2798 __ pop(itos);
2799 if (!is_static) pop_and_check_object(obj);
2800 __ strw(r0, field);
2801 if (rc == may_rewrite) {
2802 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2803 }
2804 __ b(Done);
2805 }
2806
2807 __ bind(notInt);
2808 __ cmp(flags, ctos);
2809 __ br(Assembler::NE, notChar);
2810
2811 // ctos
2812 {
2813 __ pop(ctos);
2814 if (!is_static) pop_and_check_object(obj);
2815 __ strh(r0, field);
2816 if (rc == may_rewrite) {
2817 patch_bytecode(Bytecodes::_fast_cputfield, bc, r1, true, byte_no);
2818 }
2819 __ b(Done);
2820 }
2821
2822 __ bind(notChar);
2823 __ cmp(flags, stos);
2824 __ br(Assembler::NE, notShort);
2825
2826 // stos
2827 {
2828 __ pop(stos);
2829 if (!is_static) pop_and_check_object(obj);
2830 __ strh(r0, field);
2831 if (rc == may_rewrite) {
2832 patch_bytecode(Bytecodes::_fast_sputfield, bc, r1, true, byte_no);
2833 }
2834 __ b(Done);
2835 }
2836
2837 __ bind(notShort);
2838 __ cmp(flags, ltos);
2839 __ br(Assembler::NE, notLong);
2840
2841 // ltos
2842 {
2843 __ pop(ltos);
2844 if (!is_static) pop_and_check_object(obj);
2845 __ str(r0, field);
2846 if (rc == may_rewrite) {
2847 patch_bytecode(Bytecodes::_fast_lputfield, bc, r1, true, byte_no);
2848 }
2849 __ b(Done);
2850 }
2851
2852 __ bind(notLong);
2853 __ cmp(flags, ftos);
2854 __ br(Assembler::NE, notFloat);
2855
2856 // ftos
2857 {
2858 __ pop(ftos);
2859 if (!is_static) pop_and_check_object(obj);
2860 __ strs(v0, field);
2861 if (rc == may_rewrite) {
2862 patch_bytecode(Bytecodes::_fast_fputfield, bc, r1, true, byte_no);
2863 }
2864 __ b(Done);
2865 }
2866
2867 __ bind(notFloat);
2868 #ifdef ASSERT
2869 __ cmp(flags, dtos);
2870 __ br(Assembler::NE, notDouble);
2871 #endif
2872
2873 // dtos
2874 {
2875 __ pop(dtos);
2876 if (!is_static) pop_and_check_object(obj);
2877 __ strd(v0, field);
2878 if (rc == may_rewrite) {
2879 patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
2880 }
2881 }
2882
2883 #ifdef ASSERT
2884 __ b(Done);
2885
2886 __ bind(notDouble);
2887 __ stop("Bad state");
2888 #endif
2889
2890 __ bind(Done);
2891
2892 {
2893 Label notVolatile;
2894 __ tbz(r5, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2895 __ membar(MacroAssembler::StoreLoad);
2896 __ bind(notVolatile);
2897 }
2988 Label notVolatile;
2989 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2990 __ membar(MacroAssembler::StoreStore);
2991 __ bind(notVolatile);
2992 }
2993
2994 Label notVolatile;
2995
2996 // Get object from stack
2997 pop_and_check_object(r2);
2998
2999 // field address
3000 const Address field(r2, r1);
3001
3002 // access field
3003 switch (bytecode()) {
3004 case Bytecodes::_fast_aputfield:
3005 do_oop_store(_masm, field, r0, IN_HEAP);
3006 break;
3007 case Bytecodes::_fast_lputfield:
3008 __ str(r0, field);
3009 break;
3010 case Bytecodes::_fast_iputfield:
3011 __ strw(r0, field);
3012 break;
3013 case Bytecodes::_fast_zputfield:
3014 __ andw(r0, r0, 0x1); // boolean is true if LSB is 1
3015 // fall through to bputfield
3016 case Bytecodes::_fast_bputfield:
3017 __ strb(r0, field);
3018 break;
3019 case Bytecodes::_fast_sputfield:
3020 // fall through
3021 case Bytecodes::_fast_cputfield:
3022 __ strh(r0, field);
3023 break;
3024 case Bytecodes::_fast_fputfield:
3025 __ strs(v0, field);
3026 break;
3027 case Bytecodes::_fast_dputfield:
3028 __ strd(v0, field);
3029 break;
3030 default:
3031 ShouldNotReachHere();
3032 }
3033
3034 {
3035 Label notVolatile;
3036 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3037 __ membar(MacroAssembler::StoreLoad);
3038 __ bind(notVolatile);
3039 }
3040 }
3041
3042
3043 void TemplateTable::fast_accessfield(TosState state)
3044 {
3045 transition(atos, state);
3046 // Do the JVMTI work here to avoid disturbing the register state below
3047 if (JvmtiExport::can_post_field_access()) {
3048 // Check to see if a field access watch has been set before we
3081 // 8179954: We need to make sure that the code generated for
3082 // volatile accesses forms a sequentially-consistent set of
3083 // operations when combined with STLR and LDAR. Without a leading
3084 // membar it's possible for a simple Dekker test to fail if loads
3085 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3086 // the stores in one method and we interpret the loads in another.
3087 if (! UseBarriersForVolatile) {
3088 Label notVolatile;
3089 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3090 __ membar(MacroAssembler::AnyAny);
3091 __ bind(notVolatile);
3092 }
3093
3094 // access field
3095 switch (bytecode()) {
3096 case Bytecodes::_fast_agetfield:
3097 do_oop_load(_masm, field, r0, IN_HEAP);
3098 __ verify_oop(r0);
3099 break;
3100 case Bytecodes::_fast_lgetfield:
3101 __ ldr(r0, field);
3102 break;
3103 case Bytecodes::_fast_igetfield:
3104 __ ldrw(r0, field);
3105 break;
3106 case Bytecodes::_fast_bgetfield:
3107 __ load_signed_byte(r0, field);
3108 break;
3109 case Bytecodes::_fast_sgetfield:
3110 __ load_signed_short(r0, field);
3111 break;
3112 case Bytecodes::_fast_cgetfield:
3113 __ load_unsigned_short(r0, field);
3114 break;
3115 case Bytecodes::_fast_fgetfield:
3116 __ ldrs(v0, field);
3117 break;
3118 case Bytecodes::_fast_dgetfield:
3119 __ ldrd(v0, field);
3120 break;
3121 default:
3122 ShouldNotReachHere();
3123 }
3124 {
3125 Label notVolatile;
3126 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3127 __ membar(MacroAssembler::LoadLoad | MacroAssembler::LoadStore);
3128 __ bind(notVolatile);
3129 }
3130 }
3131
3132 void TemplateTable::fast_xaccess(TosState state)
3133 {
3134 transition(vtos, state);
3135
3136 // get receiver
3137 __ ldr(r0, aaddress(0));
3138 // access constant pool cache
3139 __ get_cache_and_index_at_bcp(r2, r3, 2);
3144 // volatile accesses forms a sequentially-consistent set of
3145 // operations when combined with STLR and LDAR. Without a leading
3146 // membar it's possible for a simple Dekker test to fail if loads
3147 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3148 // the stores in one method and we interpret the loads in another.
3149 if (! UseBarriersForVolatile) {
3150 Label notVolatile;
3151 __ ldrw(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
3152 ConstantPoolCacheEntry::flags_offset())));
3153 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3154 __ membar(MacroAssembler::AnyAny);
3155 __ bind(notVolatile);
3156 }
3157
3158 // make sure exception is reported in correct bcp range (getfield is
3159 // next instruction)
3160 __ increment(rbcp);
3161 __ null_check(r0);
3162 switch (state) {
3163 case itos:
3164 __ ldrw(r0, Address(r0, r1, Address::lsl(0)));
3165 break;
3166 case atos:
3167 do_oop_load(_masm, Address(r0, r1, Address::lsl(0)), r0, IN_HEAP);
3168 __ verify_oop(r0);
3169 break;
3170 case ftos:
3171 __ ldrs(v0, Address(r0, r1, Address::lsl(0)));
3172 break;
3173 default:
3174 ShouldNotReachHere();
3175 }
3176
3177 {
3178 Label notVolatile;
3179 __ ldrw(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
3180 ConstantPoolCacheEntry::flags_offset())));
3181 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3182 __ membar(MacroAssembler::LoadLoad | MacroAssembler::LoadStore);
3183 __ bind(notVolatile);
3184 }
3185
3186 __ decrement(rbcp);
3187 }
3188
3189
3190
3191 //-----------------------------------------------------------------------------
|
743 assert(r1 != array, "different registers");
744 __ mov(r1, index);
745 }
746 Label ok;
747 __ br(Assembler::LO, ok);
748 // ??? convention: move array into r3 for exception message
749 __ mov(r3, array);
750 __ mov(rscratch1, Interpreter::_throw_ArrayIndexOutOfBoundsException_entry);
751 __ br(rscratch1);
752 __ bind(ok);
753 }
754
// iaload bytecode: push a[i] (32-bit int), routed through the GC-aware
// access_load_at API.  The array base offset is folded into the index
// (pre-scaled by >> 2) so one scaled address covers base + index*4.
755 void TemplateTable::iaload()
756 {
757 transition(itos, itos);
758 __ mov(r1, r0);
759 __ pop_ptr(r0);
760 // r0: array
761 // r1: index
762 index_check(r0, r1); // leaves index in r1, kills rscratch1
763 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2); // fold base offset into element-scaled index
764 __ access_load_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
765 }
766
// laload bytecode: push a[i] (64-bit long) via access_load_at.
767 void TemplateTable::laload()
768 {
769 transition(itos, ltos);
770 __ mov(r1, r0);
771 __ pop_ptr(r0);
772 // r0: array
773 // r1: index
774 index_check(r0, r1); // leaves index in r1, kills rscratch1
775 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_LONG) >> 3); // fold base offset into element-scaled index
776 __ access_load_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
777 }
778
// faload bytecode: push a[i] (float) via access_load_at; the result
// lands in v0 (ftos).
779 void TemplateTable::faload()
780 {
781 transition(itos, ftos);
782 __ mov(r1, r0);
783 __ pop_ptr(r0);
784 // r0: array
785 // r1: index
786 index_check(r0, r1); // leaves index in r1, kills rscratch1
787 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2); // fold base offset into element-scaled index
// NOTE(review): sibling FP accesses (fastore/dastore and the getfield ftos
// path) pass noreg /* ftos */ for the value register since the result goes
// to v0 — confirm whether passing r0 here is intentional or should be noreg.
788 __ access_load_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(2)), noreg, noreg);
789 }
790
// daload bytecode: push a[i] (double) via access_load_at; the result
// lands in v0 (dtos).
791 void TemplateTable::daload()
792 {
793 transition(itos, dtos);
794 __ mov(r1, r0);
795 __ pop_ptr(r0);
796 // r0: array
797 // r1: index
798 index_check(r0, r1); // leaves index in r1, kills rscratch1
799 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3); // fold base offset into element-scaled index
// NOTE(review): sibling FP accesses pass noreg /* dtos */ for the value
// register (result goes to v0) — confirm r0 here vs noreg for consistency.
800 __ access_load_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(3)), noreg, noreg);
801 }
802
// aaload bytecode: push a[i] (object reference) through do_oop_load so the
// GC load barrier / oop decoding is applied.  Element scale follows the
// heap-oop size (narrow vs full oops) via LogBytesPerHeapOop.
803 void TemplateTable::aaload()
804 {
805 transition(itos, atos);
806 __ mov(r1, r0);
807 __ pop_ptr(r0);
808 // r0: array
809 // r1: index
810 index_check(r0, r1); // leaves index in r1, kills rscratch1
811 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop); // fold base offset into element-scaled index
812 do_oop_load(_masm,
813 Address(r0, r1, Address::uxtw(LogBytesPerHeapOop)),
814 r0,
815 IN_HEAP | IN_HEAP_ARRAY);
816 }
817
// baload bytecode: push a[i] (byte/boolean element) via access_load_at.
818 void TemplateTable::baload()
819 {
820 transition(itos, itos);
821 __ mov(r1, r0);
822 __ pop_ptr(r0);
823 // r0: array
824 // r1: index
825 index_check(r0, r1); // leaves index in r1, kills rscratch1
826 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0); // byte elements: offset already in element units
827 __ access_load_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(0)), noreg, noreg);
828 }
829
// caload bytecode: push a[i] (char) via access_load_at.
830 void TemplateTable::caload()
831 {
832 transition(itos, itos);
833 __ mov(r1, r0);
834 __ pop_ptr(r0);
835 // r0: array
836 // r1: index
837 index_check(r0, r1); // leaves index in r1, kills rscratch1
838 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1); // fold base offset into element-scaled index
839 __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg);
840 }
841
842 // iload followed by caload frequent pair
// Fused template for the frequent iload+caload pair: the index is read
// straight from the local variable (vtos entry), avoiding the separate
// templates' intermediate push/pop of the index.
843 void TemplateTable::fast_icaload()
844 {
845 transition(vtos, itos);
846 // load index out of locals
847 locals_index(r2);
848 __ ldr(r1, iaddress(r2));
849 
850 __ pop_ptr(r0);
851 
852 // r0: array
853 // r1: index
854 index_check(r0, r1); // leaves index in r1, kills rscratch1
855 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1); // fold base offset into element-scaled index
856 __ access_load_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg);
857 }
858
// saload bytecode: push a[i] (short) via access_load_at (sign-extending,
// unlike caload's T_CHAR zero-extension).
859 void TemplateTable::saload()
860 {
861 transition(itos, itos);
862 __ mov(r1, r0);
863 __ pop_ptr(r0);
864 // r0: array
865 // r1: index
866 index_check(r0, r1); // leaves index in r1, kills rscratch1
867 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_SHORT) >> 1); // fold base offset into element-scaled index
868 __ access_load_at(T_SHORT, IN_HEAP | IN_HEAP_ARRAY, r0, Address(r0, r1, Address::uxtw(1)), noreg, noreg);
869 }
870
// iload_<n> bytecodes: push the int held in local slot n.
871 void TemplateTable::iload(int n)
872 {
873 transition(vtos, itos);
874 __ ldr(r0, iaddress(n));
875 }
876
// lload_<n> bytecodes: push the long held in local slot pair n.
877 void TemplateTable::lload(int n)
878 {
879 transition(vtos, ltos);
880 __ ldr(r0, laddress(n));
881 }
882
// fload_<n> bytecodes: push the float held in local slot n (into v0).
883 void TemplateTable::fload(int n)
884 {
885 transition(vtos, ftos);
886 __ ldrs(v0, faddress(n));
887 }
888
1041 __ pop_d();
1042 locals_index_wide(r1);
1043 __ strd(v0, daddress(r1, rscratch1, _masm));
1044 }
1045
// wide astore: pop a reference and store it into the local addressed by
// a 16-bit (wide) local index.
1046 void TemplateTable::wide_astore() {
1047 transition(vtos, vtos);
1048 __ pop_ptr(r0);
1049 locals_index_wide(r1);
1050 __ str(r0, aaddress(r1));
1051 }
1052
// iastore bytecode: a[i] = value (int) via access_store_at.  The base
// offset is folded into the element-scaled index before addressing.
1053 void TemplateTable::iastore() {
1054 transition(itos, vtos);
1055 __ pop_i(r1);
1056 __ pop_ptr(r3);
1057 // r0: value
1058 // r1: index
1059 // r3: array
1060 index_check(r3, r1); // prefer index in r1
1061 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_INT) >> 2); // fold base offset into element-scaled index
1062 __ access_store_at(T_INT, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(2)), r0, noreg, noreg);
1063 }
1064
// lastore bytecode: a[i] = value (long) via access_store_at.
1065 void TemplateTable::lastore() {
1066 transition(ltos, vtos);
1067 __ pop_i(r1);
1068 __ pop_ptr(r3);
1069 // r0: value
1070 // r1: index
1071 // r3: array
1072 index_check(r3, r1); // prefer index in r1
1073 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_LONG) >> 3); // fold base offset into element-scaled index
1074 __ access_store_at(T_LONG, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(3)), r0, noreg, noreg);
1075 }
1076
// fastore bytecode: a[i] = value (float).  The value lives in v0 (ftos),
// so noreg is passed as the value register to access_store_at.
1077 void TemplateTable::fastore() {
1078 transition(ftos, vtos);
1079 __ pop_i(r1);
1080 __ pop_ptr(r3);
1081 // v0: value
1082 // r1: index
1083 // r3: array
1084 index_check(r3, r1); // prefer index in r1
1085 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_FLOAT) >> 2); // fold base offset into element-scaled index
1086 __ access_store_at(T_FLOAT, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(2)), noreg /* ftos */, noreg, noreg);
1087 }
1088
// dastore bytecode: a[i] = value (double).  The value lives in v0 (dtos),
// so noreg is passed as the value register to access_store_at.
1089 void TemplateTable::dastore() {
1090 transition(dtos, vtos);
1091 __ pop_i(r1);
1092 __ pop_ptr(r3);
1093 // v0: value
1094 // r1: index
1095 // r3: array
1096 index_check(r3, r1); // prefer index in r1
1097 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3); // fold base offset into element-scaled index
1098 __ access_store_at(T_DOUBLE, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg);
1099 }
1100
1101 void TemplateTable::aastore() {
1102 Label is_null, ok_is_subtype, done;
1103 transition(vtos, vtos);
1104 // stack: ..., array, index, value
1105 __ ldr(r0, at_tos()); // value
1106 __ ldr(r2, at_tos_p1()); // index
1107 __ ldr(r3, at_tos_p2()); // array
1108
1109 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1110
1111 index_check(r3, r2); // kills r1
1112 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
1113
1114 // do array store check - check for NULL value first
1115 __ cbz(r0, is_null);
1116
1117 // Move subklass into r1
1118 __ load_klass(r1, r0);
1119 // Move superklass into r0
1120 __ load_klass(r0, r3);
1121 __ ldr(r0, Address(r0,
1122 ObjArrayKlass::element_klass_offset()));
1123 // Compress array + index*oopSize + 12 into a single register. Frees r2.
1124
1125 // Generate subtype check. Blows r2, r5
1126 // Superklass in r0. Subklass in r1.
1127 __ gen_subtype_check(r1, ok_is_subtype);
1128
1129 // Come here on failure
1130 // object is at TOS
1131 __ b(Interpreter::_throw_ArrayStoreException_entry);
1132
// bastore bytecode: a[i] = value (byte).  boolean[] and byte[] share this
// bytecode, so the array's layout-helper diff bit is tested to decide
// whether the value must be masked down to 0/1 before storing.
1154 void TemplateTable::bastore()
1155 {
1156 transition(itos, vtos);
1157 __ pop_i(r1);
1158 __ pop_ptr(r3);
1159 // r0: value
1160 // r1: index
1161 // r3: array
1162 index_check(r3, r1); // prefer index in r1
1163 
1164 // Need to check whether array is boolean or byte
1165 // since both types share the bastore bytecode.
1166 __ load_klass(r2, r3);
1167 __ ldrw(r2, Address(r2, Klass::layout_helper_offset()));
1168 int diffbit_index = exact_log2(Klass::layout_helper_boolean_diffbit());
1169 Label L_skip;
1170 __ tbz(r2, diffbit_index, L_skip); // bit clear => byte[] => no masking
1171 __ andw(r0, r0, 1); // if it is a T_BOOLEAN array, mask the stored value to 0/1
1172 __ bind(L_skip);
1173 
1174 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_BYTE) >> 0); // byte elements: offset already in element units
1175 __ access_store_at(T_BYTE, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(0)), r0, noreg, noreg);
1176 }
1177
// castore bytecode: a[i] = value (char), 16-bit store via access_store_at.
1178 void TemplateTable::castore()
1179 {
1180 transition(itos, vtos);
1181 __ pop_i(r1);
1182 __ pop_ptr(r3);
1183 // r0: value
1184 // r1: index
1185 // r3: array
1186 index_check(r3, r1); // prefer index in r1
1187 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_CHAR) >> 1); // fold base offset into element-scaled index
1188 __ access_store_at(T_CHAR, IN_HEAP | IN_HEAP_ARRAY, Address(r3, r1, Address::uxtw(1)), r0, noreg, noreg);
1189 }
1190
// sastore bytecode: identical code to castore — both store the low
// 16 bits of the value, only the element type name differs.
1191 void TemplateTable::sastore()
1192 {
1193 castore();
1194 }
1195
// istore_<n> bytecodes: pop tos int into local slot n.
1196 void TemplateTable::istore(int n)
1197 {
1198 transition(itos, vtos);
1199 __ str(r0, iaddress(n));
1200 }
1201
// lstore_<n> bytecodes: pop tos long into local slot pair n.
1202 void TemplateTable::lstore(int n)
1203 {
1204 transition(ltos, vtos);
1205 __ str(r0, laddress(n));
1206 }
1207
1208 void TemplateTable::fstore(int n)
2489 __ bind(notVolatile);
2490 }
2491
2492 const Address field(obj, off);
2493
2494 Label Done, notByte, notBool, notInt, notShort, notChar,
2495 notLong, notFloat, notObj, notDouble;
2496
2497 // x86 uses a shift and mask or wings it with a shift plus assert
2498 // the mask is not needed. aarch64 just uses bitfield extract
2499 __ ubfxw(flags, raw_flags, ConstantPoolCacheEntry::tos_state_shift,
2500 ConstantPoolCacheEntry::tos_state_bits);
2501
2502 assert(btos == 0, "change code, btos != 0");
2503 __ cbnz(flags, notByte);
2504
2505 // Don't rewrite getstatic, only getfield
2506 if (is_static) rc = may_not_rewrite;
2507
2508 // btos
2509 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
2510 __ push(btos);
2511 // Rewrite bytecode to be faster
2512 if (rc == may_rewrite) {
2513 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2514 }
2515 __ b(Done);
2516
2517 __ bind(notByte);
2518 __ cmp(flags, ztos);
2519 __ br(Assembler::NE, notBool);
2520
2521 // ztos (same code as btos)
2522 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2523 __ push(ztos);
2524 // Rewrite bytecode to be faster
2525 if (rc == may_rewrite) {
2526 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2527 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2528 }
2529 __ b(Done);
2530
2531 __ bind(notBool);
2532 __ cmp(flags, atos);
2533 __ br(Assembler::NE, notObj);
2534 // atos
2535 do_oop_load(_masm, field, r0, IN_HEAP);
2536 __ push(atos);
2537 if (rc == may_rewrite) {
2538 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2539 }
2540 __ b(Done);
2541
2542 __ bind(notObj);
2543 __ cmp(flags, itos);
2544 __ br(Assembler::NE, notInt);
2545 // itos
2546 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2547 __ push(itos);
2548 // Rewrite bytecode to be faster
2549 if (rc == may_rewrite) {
2550 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2551 }
2552 __ b(Done);
2553
2554 __ bind(notInt);
2555 __ cmp(flags, ctos);
2556 __ br(Assembler::NE, notChar);
2557 // ctos
2558 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2559 __ push(ctos);
2560 // Rewrite bytecode to be faster
2561 if (rc == may_rewrite) {
2562 patch_bytecode(Bytecodes::_fast_cgetfield, bc, r1);
2563 }
2564 __ b(Done);
2565
2566 __ bind(notChar);
2567 __ cmp(flags, stos);
2568 __ br(Assembler::NE, notShort);
2569 // stos
2570 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
2571 __ push(stos);
2572 // Rewrite bytecode to be faster
2573 if (rc == may_rewrite) {
2574 patch_bytecode(Bytecodes::_fast_sgetfield, bc, r1);
2575 }
2576 __ b(Done);
2577
2578 __ bind(notShort);
2579 __ cmp(flags, ltos);
2580 __ br(Assembler::NE, notLong);
2581 // ltos
2582 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
2583 __ push(ltos);
2584 // Rewrite bytecode to be faster
2585 if (rc == may_rewrite) {
2586 patch_bytecode(Bytecodes::_fast_lgetfield, bc, r1);
2587 }
2588 __ b(Done);
2589
2590 __ bind(notLong);
2591 __ cmp(flags, ftos);
2592 __ br(Assembler::NE, notFloat);
2593 // ftos
2594 __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2595 __ push(ftos);
2596 // Rewrite bytecode to be faster
2597 if (rc == may_rewrite) {
2598 patch_bytecode(Bytecodes::_fast_fgetfield, bc, r1);
2599 }
2600 __ b(Done);
2601
2602 __ bind(notFloat);
2603 #ifdef ASSERT
2604 __ cmp(flags, dtos);
2605 __ br(Assembler::NE, notDouble);
2606 #endif
2607 // dtos
2608 __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2609 __ push(dtos);
2610 // Rewrite bytecode to be faster
2611 if (rc == may_rewrite) {
2612 patch_bytecode(Bytecodes::_fast_dgetfield, bc, r1);
2613 }
2614 #ifdef ASSERT
2615 __ b(Done);
2616
2617 __ bind(notDouble);
2618 __ stop("Bad state");
2619 #endif
2620
2621 __ bind(Done);
2622
2623 Label notVolatile;
2624 __ tbz(raw_flags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2625 __ membar(MacroAssembler::LoadLoad | MacroAssembler::LoadStore);
2626 __ bind(notVolatile);
2627 }
2628
2726 // field address
2727 const Address field(obj, off);
2728
2729 Label notByte, notBool, notInt, notShort, notChar,
2730 notLong, notFloat, notObj, notDouble;
2731
2732 // x86 uses a shift and mask or wings it with a shift plus assert
2733 // the mask is not needed. aarch64 just uses bitfield extract
2734 __ ubfxw(flags, flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
2735
2736 assert(btos == 0, "change code, btos != 0");
2737 __ cbnz(flags, notByte);
2738
2739 // Don't rewrite putstatic, only putfield
2740 if (is_static) rc = may_not_rewrite;
2741
2742 // btos
2743 {
2744 __ pop(btos);
2745 if (!is_static) pop_and_check_object(obj);
2746 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
2747 if (rc == may_rewrite) {
2748 patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no);
2749 }
2750 __ b(Done);
2751 }
2752
2753 __ bind(notByte);
2754 __ cmp(flags, ztos);
2755 __ br(Assembler::NE, notBool);
2756
2757 // ztos
2758 {
2759 __ pop(ztos);
2760 if (!is_static) pop_and_check_object(obj);
2761 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
2762 if (rc == may_rewrite) {
2763 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2764 }
2765 __ b(Done);
2766 }
2767
2768 __ bind(notBool);
2769 __ cmp(flags, atos);
2770 __ br(Assembler::NE, notObj);
2771
2772 // atos
2773 {
2774 __ pop(atos);
2775 if (!is_static) pop_and_check_object(obj);
2776 // Store into the field
2777 do_oop_store(_masm, field, r0, IN_HEAP);
2778 if (rc == may_rewrite) {
2779 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2780 }
2781 __ b(Done);
2782 }
2783
2784 __ bind(notObj);
2785 __ cmp(flags, itos);
2786 __ br(Assembler::NE, notInt);
2787
2788 // itos
2789 {
2790 __ pop(itos);
2791 if (!is_static) pop_and_check_object(obj);
2792 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
2793 if (rc == may_rewrite) {
2794 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2795 }
2796 __ b(Done);
2797 }
2798
2799 __ bind(notInt);
2800 __ cmp(flags, ctos);
2801 __ br(Assembler::NE, notChar);
2802
2803 // ctos
2804 {
2805 __ pop(ctos);
2806 if (!is_static) pop_and_check_object(obj);
2807 __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);
2808 if (rc == may_rewrite) {
2809 patch_bytecode(Bytecodes::_fast_cputfield, bc, r1, true, byte_no);
2810 }
2811 __ b(Done);
2812 }
2813
2814 __ bind(notChar);
2815 __ cmp(flags, stos);
2816 __ br(Assembler::NE, notShort);
2817
2818 // stos
2819 {
2820 __ pop(stos);
2821 if (!is_static) pop_and_check_object(obj);
2822 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
2823 if (rc == may_rewrite) {
2824 patch_bytecode(Bytecodes::_fast_sputfield, bc, r1, true, byte_no);
2825 }
2826 __ b(Done);
2827 }
2828
2829 __ bind(notShort);
2830 __ cmp(flags, ltos);
2831 __ br(Assembler::NE, notLong);
2832
2833 // ltos
2834 {
2835 __ pop(ltos);
2836 if (!is_static) pop_and_check_object(obj);
2837 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
2838 if (rc == may_rewrite) {
2839 patch_bytecode(Bytecodes::_fast_lputfield, bc, r1, true, byte_no);
2840 }
2841 __ b(Done);
2842 }
2843
2844 __ bind(notLong);
2845 __ cmp(flags, ftos);
2846 __ br(Assembler::NE, notFloat);
2847
2848 // ftos
2849 {
2850 __ pop(ftos);
2851 if (!is_static) pop_and_check_object(obj);
2852 __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
2853 if (rc == may_rewrite) {
2854 patch_bytecode(Bytecodes::_fast_fputfield, bc, r1, true, byte_no);
2855 }
2856 __ b(Done);
2857 }
2858
2859 __ bind(notFloat);
2860 #ifdef ASSERT
2861 __ cmp(flags, dtos);
2862 __ br(Assembler::NE, notDouble);
2863 #endif
2864
2865 // dtos
2866 {
2867 __ pop(dtos);
2868 if (!is_static) pop_and_check_object(obj);
2869 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
2870 if (rc == may_rewrite) {
2871 patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
2872 }
2873 }
2874
2875 #ifdef ASSERT
2876 __ b(Done);
2877
2878 __ bind(notDouble);
2879 __ stop("Bad state");
2880 #endif
2881
2882 __ bind(Done);
2883
2884 {
2885 Label notVolatile;
2886 __ tbz(r5, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2887 __ membar(MacroAssembler::StoreLoad);
2888 __ bind(notVolatile);
2889 }
2980 Label notVolatile;
2981 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2982 __ membar(MacroAssembler::StoreStore);
2983 __ bind(notVolatile);
2984 }
2985
2986 Label notVolatile;
2987
2988 // Get object from stack
2989 pop_and_check_object(r2);
2990
2991 // field address
2992 const Address field(r2, r1);
2993
2994 // access field
2995 switch (bytecode()) {
2996 case Bytecodes::_fast_aputfield:
2997 do_oop_store(_masm, field, r0, IN_HEAP);
2998 break;
2999 case Bytecodes::_fast_lputfield:
3000 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
3001 break;
3002 case Bytecodes::_fast_iputfield:
3003 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
3004 break;
3005 case Bytecodes::_fast_zputfield:
3006 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
3007 break;
3008 case Bytecodes::_fast_bputfield:
3009 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
3010 break;
3011 case Bytecodes::_fast_sputfield:
3012 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
3013 break;
3014 case Bytecodes::_fast_cputfield:
3015 __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);
3016 break;
3017 case Bytecodes::_fast_fputfield:
3018 __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
3019 break;
3020 case Bytecodes::_fast_dputfield:
3021 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
3022 break;
3023 default:
3024 ShouldNotReachHere();
3025 }
3026
3027 {
3028 Label notVolatile;
3029 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3030 __ membar(MacroAssembler::StoreLoad);
3031 __ bind(notVolatile);
3032 }
3033 }
3034
3035
// fast_accessfield: template for the rewritten _fast_Xgetfield bytecodes.
// The receiver arrives on tos in r0 (atos); the generated code loads the
// resolved field and leaves the value in the register implied by `state`.
// NOTE(review): this listing is missing the method's middle (original
// lines 3042-3073); r3 appears to hold the cpCache entry flags word and
// `field` the resolved field address, both set up in the code not shown
// here — confirm against the full source.
3036 void TemplateTable::fast_accessfield(TosState state)
3037 {
3038 transition(atos, state);
3039 // Do the JVMTI work here to avoid disturbing the register state below
3040 if (JvmtiExport::can_post_field_access()) {
3041 // Check to see if a field access watch has been set before we
// For volatile fields, emit a leading full barrier (AnyAny) so that
// interpreted volatile loads stay sequentially consistent with
// compiled code that uses STLR for volatile stores (Dekker pattern).
3074 // 8179954: We need to make sure that the code generated for
3075 // volatile accesses forms a sequentially-consistent set of
3076 // operations when combined with STLR and LDAR. Without a leading
3077 // membar it's possible for a simple Dekker test to fail if loads
3078 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3079 // the stores in one method and we interpret the loads in another.
3080 if (! UseBarriersForVolatile) {
3081 Label notVolatile;
// Skip the barrier when the is_volatile bit of the flags is clear.
3082 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3083 __ membar(MacroAssembler::AnyAny);
3084 __ bind(notVolatile);
3085 }
3086
3087 // access field
// Dispatch on the concrete fast-get bytecode being generated and emit
// the correspondingly-typed load. Integer-family results land in r0;
// float/double use the FP tos register (the noreg /* ftos|dtos */
// convention of access_load_at).
3088 switch (bytecode()) {
3089 case Bytecodes::_fast_agetfield:
3090 do_oop_load(_masm, field, r0, IN_HEAP);
3091 __ verify_oop(r0);
3092 break;
3093 case Bytecodes::_fast_lgetfield:
3094 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
3095 break;
3096 case Bytecodes::_fast_igetfield:
3097 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
3098 break;
3099 case Bytecodes::_fast_bgetfield:
3100 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
3101 break;
3102 case Bytecodes::_fast_sgetfield:
3103 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
3104 break;
3105 case Bytecodes::_fast_cgetfield:
3106 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
3107 break;
3108 case Bytecodes::_fast_fgetfield:
3109 __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
3110 break;
3111 case Bytecodes::_fast_dgetfield:
3112 __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
3113 break;
3114 default:
3115 ShouldNotReachHere();
3116 }
// Trailing acquire-style barrier after a volatile load: order this
// load before subsequent loads and stores.
3117 {
3118 Label notVolatile;
3119 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3120 __ membar(MacroAssembler::LoadLoad | MacroAssembler::LoadStore);
3121 __ bind(notVolatile);
3122 }
3123 }
3124
// fast_xaccess: template for the fused aload_0 + fast getfield
// bytecodes (presumably _fast_Xaccess_0 — confirm against the bytecode
// table). Loads the receiver from local slot 0 and immediately performs
// the field access whose cpCache entry sits at bcp + 2.
// NOTE(review): this listing is missing original lines 3133-3136; the
// load of the field offset into r1, which the Address(r0, r1, ...)
// forms below rely on, must happen in that unseen code — verify.
3125 void TemplateTable::fast_xaccess(TosState state)
3126 {
3127 transition(vtos, state);
3128
3129 // get receiver
3130 __ ldr(r0, aaddress(0));
3131 // access constant pool cache
// r2 <- cache entry, r3 <- index, for the constant-pool index encoded
// 2 bytes past the current bcp.
3132 __ get_cache_and_index_at_bcp(r2, r3, 2);
// 8179954 (continued): leading full barrier for volatile fields so
// interpreted loads stay sequentially consistent with compiled STLR
// stores.
3137 // volatile accesses forms a sequentially-consistent set of
3138 // operations when combined with STLR and LDAR. Without a leading
3139 // membar it's possible for a simple Dekker test to fail if loads
3140 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3141 // the stores in one method and we interpret the loads in another.
3142 if (! UseBarriersForVolatile) {
3143 Label notVolatile;
// Re-load the flags word from the cache entry and test the volatile bit.
3144 __ ldrw(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
3145 ConstantPoolCacheEntry::flags_offset())));
3146 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3147 __ membar(MacroAssembler::AnyAny);
3148 __ bind(notVolatile);
3149 }
3150
3151 // make sure exception is reported in correct bcp range (getfield is
3152 // next instruction)
// Temporarily advance bcp so a NullPointerException from the implicit
// null check is attributed to the getfield part of the fused bytecode.
3153 __ increment(rbcp);
3154 __ null_check(r0);
// Typed load: base = receiver (r0), offset = r1 (loaded from the cache
// entry in code not shown here).
3155 switch (state) {
3156 case itos:
3157 __ access_load_at(T_INT, IN_HEAP, r0, Address(r0, r1, Address::lsl(0)), noreg, noreg);
3158 break;
3159 case atos:
3160 do_oop_load(_masm, Address(r0, r1, Address::lsl(0)), r0, IN_HEAP);
3161 __ verify_oop(r0);
3162 break;
3163 case ftos:
3164 __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, Address(r0, r1, Address::lsl(0)), noreg, noreg);
3165 break;
3166 default:
3167 ShouldNotReachHere();
3168 }
3169
// Trailing acquire-style barrier for volatile loads (LoadLoad|LoadStore).
3170 {
3171 Label notVolatile;
3172 __ ldrw(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
3173 ConstantPoolCacheEntry::flags_offset())));
3174 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3175 __ membar(MacroAssembler::LoadLoad | MacroAssembler::LoadStore);
3176 __ bind(notVolatile);
3177 }
3178
// Restore bcp to the aload_0 position now that the access is done.
3179 __ decrement(rbcp);
3180 }
3181
3182
3183
3184 //-----------------------------------------------------------------------------
|