880
// 4-byte accesses only! Don't use it to access 8 bytes!
// Not implemented on this platform: s390 C1 never splits a LIR_Address
// into separate lo/hi word addresses, so any call is a programming error.
Address LIR_Assembler::as_Address_lo(LIR_Address* addr) {
  ShouldNotCallThis();
  return 0; // unused; satisfies the return-path requirement only
}
886
887 void LIR_Assembler::mem2reg(LIR_Opr src_opr, LIR_Opr dest, BasicType type, LIR_PatchCode patch_code,
888 CodeEmitInfo* info, bool wide, bool unaligned) {
889
890 assert(type != T_METADATA, "load of metadata ptr not supported");
891 LIR_Address* addr = src_opr->as_address_ptr();
892 LIR_Opr to_reg = dest;
893
894 Register src = addr->base()->as_pointer_register();
895 Register disp_reg = Z_R0;
896 int disp_value = addr->disp();
897 bool needs_patching = (patch_code != lir_patch_none);
898
899 if (addr->base()->type() == T_OBJECT) {
900 __ verify_oop(src);
901 }
902
903 PatchingStub* patch = NULL;
904 if (needs_patching) {
905 patch = new PatchingStub(_masm, PatchingStub::access_field_id);
906 assert(!to_reg->is_double_cpu() ||
907 patch_code == lir_patch_none ||
908 patch_code == lir_patch_normal, "patching doesn't match register");
909 }
910
911 if (addr->index()->is_illegal()) {
912 if (!Immediate::is_simm20(disp_value)) {
913 if (needs_patching) {
914 __ load_const(Z_R1_scratch, (intptr_t)0);
915 } else {
916 __ load_const_optimized(Z_R1_scratch, disp_value);
917 }
918 disp_reg = Z_R1_scratch;
919 disp_value = 0;
920 }
955 __ z_ly(dest->as_register(), disp_value, disp_reg, src);
956 }
957 break;
958 case T_ADDRESS:
959 if (UseCompressedClassPointers && addr->disp() == oopDesc::klass_offset_in_bytes()) {
960 __ z_llgf(dest->as_register(), disp_value, disp_reg, src);
961 __ decode_klass_not_null(dest->as_register());
962 } else {
963 __ z_lg(dest->as_register(), disp_value, disp_reg, src);
964 }
965 break;
966 case T_ARRAY : // fall through
967 case T_OBJECT:
968 {
969 if (UseCompressedOops && !wide) {
970 __ z_llgf(dest->as_register(), disp_value, disp_reg, src);
971 __ oop_decoder(dest->as_register(), dest->as_register(), true);
972 } else {
973 __ z_lg(dest->as_register(), disp_value, disp_reg, src);
974 }
975 __ verify_oop(dest->as_register());
976 break;
977 }
978 case T_FLOAT:
979 if (short_disp) {
980 __ z_le(dest->as_float_reg(), disp_value, disp_reg, src);
981 } else {
982 __ z_ley(dest->as_float_reg(), disp_value, disp_reg, src);
983 }
984 break;
985 case T_DOUBLE:
986 if (short_disp) {
987 __ z_ld(dest->as_double_reg(), disp_value, disp_reg, src);
988 } else {
989 __ z_ldy(dest->as_double_reg(), disp_value, disp_reg, src);
990 }
991 break;
992 case T_LONG : __ z_lg(dest->as_register_lo(), disp_value, disp_reg, src); break;
993 default : ShouldNotReachHere();
994 }
995
996 if (patch != NULL) {
997 patching_epilog(patch, patch_code, src, info);
998 }
999 if (info != NULL) add_debug_info_for_null_check(offset, info);
1000 }
1001
1002 void LIR_Assembler::stack2reg(LIR_Opr src, LIR_Opr dest, BasicType type) {
1003 assert(src->is_stack(), "should not call otherwise");
1004 assert(dest->is_register(), "should not call otherwise");
1005
1006 if (dest->is_single_cpu()) {
1007 if (is_reference_type(type)) {
1008 __ mem2reg_opt(dest->as_register(), frame_map()->address_for_slot(src->single_stack_ix()), true);
1009 __ verify_oop(dest->as_register());
1010 } else if (type == T_METADATA || type == T_ADDRESS) {
1011 __ mem2reg_opt(dest->as_register(), frame_map()->address_for_slot(src->single_stack_ix()), true);
1012 } else {
1013 __ mem2reg_opt(dest->as_register(), frame_map()->address_for_slot(src->single_stack_ix()), false);
1014 }
1015 } else if (dest->is_double_cpu()) {
1016 Address src_addr_LO = frame_map()->address_for_slot(src->double_stack_ix());
1017 __ mem2reg_opt(dest->as_register_lo(), src_addr_LO, true);
1018 } else if (dest->is_single_fpu()) {
1019 Address src_addr = frame_map()->address_for_slot(src->single_stack_ix());
1020 __ mem2freg_opt(dest->as_float_reg(), src_addr, false);
1021 } else if (dest->is_double_fpu()) {
1022 Address src_addr = frame_map()->address_for_slot(src->double_stack_ix());
1023 __ mem2freg_opt(dest->as_double_reg(), src_addr, true);
1024 } else {
1025 ShouldNotReachHere();
1026 }
1027 }
1028
1029 void LIR_Assembler::reg2stack(LIR_Opr src, LIR_Opr dest, BasicType type, bool pop_fpu_stack) {
1030 assert(src->is_register(), "should not call otherwise");
1031 assert(dest->is_stack(), "should not call otherwise");
1032
1033 if (src->is_single_cpu()) {
1034 const Address dst = frame_map()->address_for_slot(dest->single_stack_ix());
1035 if (is_reference_type(type)) {
1036 __ verify_oop(src->as_register());
1037 __ reg2mem_opt(src->as_register(), dst, true);
1038 } else if (type == T_METADATA || type == T_ADDRESS) {
1039 __ reg2mem_opt(src->as_register(), dst, true);
1040 } else {
1041 __ reg2mem_opt(src->as_register(), dst, false);
1042 }
1043 } else if (src->is_double_cpu()) {
1044 Address dstLO = frame_map()->address_for_slot(dest->double_stack_ix());
1045 __ reg2mem_opt(src->as_register_lo(), dstLO, true);
1046 } else if (src->is_single_fpu()) {
1047 Address dst_addr = frame_map()->address_for_slot(dest->single_stack_ix());
1048 __ freg2mem_opt(src->as_float_reg(), dst_addr, false);
1049 } else if (src->is_double_fpu()) {
1050 Address dst_addr = frame_map()->address_for_slot(dest->double_stack_ix());
1051 __ freg2mem_opt(src->as_double_reg(), dst_addr, true);
1052 } else {
1053 ShouldNotReachHere();
1054 }
1055 }
1056
1062 __ z_ldr(to_reg->as_double_reg(), from_reg->as_double_reg());
1063 } else {
1064 // float to float moves
1065 assert(to_reg->is_single_fpu(), "should match");
1066 __ z_ler(to_reg->as_float_reg(), from_reg->as_float_reg());
1067 }
1068 } else if (!from_reg->is_float_kind() && !to_reg->is_float_kind()) {
1069 if (from_reg->is_double_cpu()) {
1070 __ z_lgr(to_reg->as_pointer_register(), from_reg->as_pointer_register());
1071 } else if (to_reg->is_double_cpu()) {
1072 // int to int moves
1073 __ z_lgr(to_reg->as_register_lo(), from_reg->as_register());
1074 } else {
1075 // int to int moves
1076 __ z_lgr(to_reg->as_register(), from_reg->as_register());
1077 }
1078 } else {
1079 ShouldNotReachHere();
1080 }
1081 if (is_reference_type(to_reg->type())) {
1082 __ verify_oop(to_reg->as_register());
1083 }
1084 }
1085
1086 void LIR_Assembler::reg2mem(LIR_Opr from, LIR_Opr dest_opr, BasicType type,
1087 LIR_PatchCode patch_code, CodeEmitInfo* info, bool pop_fpu_stack,
1088 bool wide, bool unaligned) {
1089 assert(type != T_METADATA, "store of metadata ptr not supported");
1090 LIR_Address* addr = dest_opr->as_address_ptr();
1091
1092 Register dest = addr->base()->as_pointer_register();
1093 Register disp_reg = Z_R0;
1094 int disp_value = addr->disp();
1095 bool needs_patching = (patch_code != lir_patch_none);
1096
1097 if (addr->base()->is_oop_register()) {
1098 __ verify_oop(dest);
1099 }
1100
1101 PatchingStub* patch = NULL;
1102 if (needs_patching) {
1103 patch = new PatchingStub(_masm, PatchingStub::access_field_id);
1104 assert(!from->is_double_cpu() ||
1105 patch_code == lir_patch_none ||
1106 patch_code == lir_patch_normal, "patching doesn't match register");
1107 }
1108
1109 assert(!needs_patching || (!Immediate::is_simm20(disp_value) && addr->index()->is_illegal()), "assumption");
1110 if (addr->index()->is_illegal()) {
1111 if (!Immediate::is_simm20(disp_value)) {
1112 if (needs_patching) {
1113 __ load_const(Z_R1_scratch, (intptr_t)0);
1114 } else {
1115 __ load_const_optimized(Z_R1_scratch, disp_value);
1116 }
1117 disp_reg = Z_R1_scratch;
1118 disp_value = 0;
1119 }
1120 } else {
1121 if (!Immediate::is_simm20(disp_value)) {
1122 __ load_const_optimized(Z_R1_scratch, disp_value);
1123 __ z_la(Z_R1_scratch, 0, Z_R1_scratch, addr->index()->as_register());
1124 disp_reg = Z_R1_scratch;
1125 disp_value = 0;
1126 }
1127 disp_reg = addr->index()->as_pointer_register();
1128 }
1129
1130 assert(disp_reg != Z_R0 || Immediate::is_simm20(disp_value), "should have set this up");
1131
1132 if (is_reference_type(type)) {
1133 __ verify_oop(from->as_register());
1134 }
1135
1136 bool short_disp = Immediate::is_uimm12(disp_value);
1137
1138 // Remember the offset of the store. The patching_epilog must be done
1139 // before the call to add_debug_info_for_null_check, otherwise the PcDescs don't get
1140 // entered in increasing order.
1141 int offset = code_offset();
1142 switch (type) {
1143 case T_BOOLEAN: // fall through
1144 case T_BYTE :
1145 if (short_disp) {
1146 __ z_stc(from->as_register(), disp_value, disp_reg, dest);
1147 } else {
1148 __ z_stcy(from->as_register(), disp_value, disp_reg, dest);
1149 }
1150 break;
1151 case T_CHAR : // fall through
1152 case T_SHORT :
1153 if (short_disp) {
2395 if (op->init_check()) {
2396 // Make sure klass is initialized & doesn't have finalizer.
2397 const int state_offset = in_bytes(InstanceKlass::init_state_offset());
2398 Register iklass = op->klass()->as_register();
2399 add_debug_info_for_null_check_here(op->stub()->info());
2400 if (Immediate::is_uimm12(state_offset)) {
2401 __ z_cli(state_offset, iklass, InstanceKlass::fully_initialized);
2402 } else {
2403 __ z_cliy(state_offset, iklass, InstanceKlass::fully_initialized);
2404 }
2405 __ branch_optimized(Assembler::bcondNotEqual, *op->stub()->entry()); // Use long branch, because slow_case might be far.
2406 }
2407 __ allocate_object(op->obj()->as_register(),
2408 op->tmp1()->as_register(),
2409 op->tmp2()->as_register(),
2410 op->header_size(),
2411 op->object_size(),
2412 op->klass()->as_register(),
2413 *op->stub()->entry());
2414 __ bind(*op->stub()->continuation());
2415 __ verify_oop(op->obj()->as_register());
2416 }
2417
2418 void LIR_Assembler::emit_alloc_array(LIR_OpAllocArray* op) {
2419 Register len = op->len()->as_register();
2420 __ move_reg_if_needed(len, T_LONG, len, T_INT); // sign extend
2421
2422 if (UseSlowPath ||
2423 (!UseFastNewObjectArray && (is_reference_type(op->type()))) ||
2424 (!UseFastNewTypeArray && (!is_reference_type(op->type())))) {
2425 __ z_brul(*op->stub()->entry());
2426 } else {
2427 __ allocate_array(op->obj()->as_register(),
2428 op->len()->as_register(),
2429 op->tmp1()->as_register(),
2430 op->tmp2()->as_register(),
2431 arrayOopDesc::header_size(op->type()),
2432 type2aelembytes(op->type()),
2433 op->klass()->as_register(),
2434 *op->stub()->entry());
2435 }
2531 __ or2mem_8(data_addr, header_bits);
2532 __ branch_optimized(Assembler::bcondAlways, *obj_is_null);
2533 __ bind(not_null);
2534 } else {
2535 __ compareU64_and_branch(obj, (intptr_t) 0, Assembler::bcondEqual, *obj_is_null);
2536 }
2537
2538 NearLabel profile_cast_failure, profile_cast_success;
2539 Label *failure_target = op->should_profile() ? &profile_cast_failure : failure;
2540 Label *success_target = op->should_profile() ? &profile_cast_success : success;
2541
2542 // Patching may screw with our temporaries on sparc,
2543 // so let's do it before loading the class.
2544 if (k->is_loaded()) {
2545 metadata2reg(k->constant_encoding(), k_RInfo);
2546 } else {
2547 klass2reg_with_patching(k_RInfo, op->info_for_patch());
2548 }
2549 assert(obj != k_RInfo, "must be different");
2550
2551 __ verify_oop(obj);
2552
2553 // Get object class.
2554 // Not a safepoint as obj null check happens earlier.
2555 if (op->fast_check()) {
2556 if (UseCompressedClassPointers) {
2557 __ load_klass(klass_RInfo, obj);
2558 __ compareU64_and_branch(k_RInfo, klass_RInfo, Assembler::bcondNotEqual, *failure_target);
2559 } else {
2560 __ z_cg(k_RInfo, Address(obj, oopDesc::klass_offset_in_bytes()));
2561 __ branch_optimized(Assembler::bcondNotEqual, *failure_target);
2562 }
2563 // Successful cast, fall through to profile or jump.
2564 } else {
2565 bool need_slow_path = !k->is_loaded() ||
2566 ((int) k->super_check_offset() == in_bytes(Klass::secondary_super_cache_offset()));
2567 intptr_t super_check_offset = k->is_loaded() ? k->super_check_offset() : -1L;
2568 __ load_klass(klass_RInfo, obj);
2569 // Perform the fast part of the checking logic.
2570 __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1,
2571 (need_slow_path ? success_target : NULL),
2992
2993 void LIR_Assembler::emit_profile_type(LIR_OpProfileType* op) {
2994 Register obj = op->obj()->as_register();
2995 Register tmp1 = op->tmp()->as_pointer_register();
2996 Register tmp2 = Z_R1_scratch;
2997 Address mdo_addr = as_Address(op->mdp()->as_address_ptr());
2998 ciKlass* exact_klass = op->exact_klass();
2999 intptr_t current_klass = op->current_klass();
3000 bool not_null = op->not_null();
3001 bool no_conflict = op->no_conflict();
3002
3003 Label update, next, none, null_seen, init_klass;
3004
3005 bool do_null = !not_null;
3006 bool exact_klass_set = exact_klass != NULL && ciTypeEntries::valid_ciklass(current_klass) == exact_klass;
3007 bool do_update = !TypeEntries::is_type_unknown(current_klass) && !exact_klass_set;
3008
3009 assert(do_null || do_update, "why are we here?");
3010 assert(!TypeEntries::was_null_seen(current_klass) || do_update, "why are we here?");
3011
3012 __ verify_oop(obj);
3013
3014 if (do_null || tmp1 != obj DEBUG_ONLY(|| true)) {
3015 __ z_ltgr(tmp1, obj);
3016 }
3017 if (do_null) {
3018 __ z_brnz(update);
3019 if (!TypeEntries::was_null_seen(current_klass)) {
3020 __ z_lg(tmp1, mdo_addr);
3021 __ z_oill(tmp1, TypeEntries::null_seen);
3022 __ z_stg(tmp1, mdo_addr);
3023 }
3024 if (do_update) {
3025 __ z_bru(next);
3026 }
3027 } else {
3028 __ asm_assert_ne("unexpect null obj", __LINE__);
3029 }
3030
3031 __ bind(update);
3032
|
880
// 4-byte accesses only! Don't use it to access 8 bytes!
// Not implemented on this platform: s390 C1 never splits a LIR_Address
// into separate lo/hi word addresses, so any call is a programming error.
Address LIR_Assembler::as_Address_lo(LIR_Address* addr) {
  ShouldNotCallThis();
  return 0; // unused; satisfies the return-path requirement only
}
886
887 void LIR_Assembler::mem2reg(LIR_Opr src_opr, LIR_Opr dest, BasicType type, LIR_PatchCode patch_code,
888 CodeEmitInfo* info, bool wide, bool unaligned) {
889
890 assert(type != T_METADATA, "load of metadata ptr not supported");
891 LIR_Address* addr = src_opr->as_address_ptr();
892 LIR_Opr to_reg = dest;
893
894 Register src = addr->base()->as_pointer_register();
895 Register disp_reg = Z_R0;
896 int disp_value = addr->disp();
897 bool needs_patching = (patch_code != lir_patch_none);
898
899 if (addr->base()->type() == T_OBJECT) {
900 __ verify_oop(src, FILE_AND_LINE);
901 }
902
903 PatchingStub* patch = NULL;
904 if (needs_patching) {
905 patch = new PatchingStub(_masm, PatchingStub::access_field_id);
906 assert(!to_reg->is_double_cpu() ||
907 patch_code == lir_patch_none ||
908 patch_code == lir_patch_normal, "patching doesn't match register");
909 }
910
911 if (addr->index()->is_illegal()) {
912 if (!Immediate::is_simm20(disp_value)) {
913 if (needs_patching) {
914 __ load_const(Z_R1_scratch, (intptr_t)0);
915 } else {
916 __ load_const_optimized(Z_R1_scratch, disp_value);
917 }
918 disp_reg = Z_R1_scratch;
919 disp_value = 0;
920 }
955 __ z_ly(dest->as_register(), disp_value, disp_reg, src);
956 }
957 break;
958 case T_ADDRESS:
959 if (UseCompressedClassPointers && addr->disp() == oopDesc::klass_offset_in_bytes()) {
960 __ z_llgf(dest->as_register(), disp_value, disp_reg, src);
961 __ decode_klass_not_null(dest->as_register());
962 } else {
963 __ z_lg(dest->as_register(), disp_value, disp_reg, src);
964 }
965 break;
966 case T_ARRAY : // fall through
967 case T_OBJECT:
968 {
969 if (UseCompressedOops && !wide) {
970 __ z_llgf(dest->as_register(), disp_value, disp_reg, src);
971 __ oop_decoder(dest->as_register(), dest->as_register(), true);
972 } else {
973 __ z_lg(dest->as_register(), disp_value, disp_reg, src);
974 }
975 __ verify_oop(dest->as_register(), FILE_AND_LINE);
976 break;
977 }
978 case T_FLOAT:
979 if (short_disp) {
980 __ z_le(dest->as_float_reg(), disp_value, disp_reg, src);
981 } else {
982 __ z_ley(dest->as_float_reg(), disp_value, disp_reg, src);
983 }
984 break;
985 case T_DOUBLE:
986 if (short_disp) {
987 __ z_ld(dest->as_double_reg(), disp_value, disp_reg, src);
988 } else {
989 __ z_ldy(dest->as_double_reg(), disp_value, disp_reg, src);
990 }
991 break;
992 case T_LONG : __ z_lg(dest->as_register_lo(), disp_value, disp_reg, src); break;
993 default : ShouldNotReachHere();
994 }
995
996 if (patch != NULL) {
997 patching_epilog(patch, patch_code, src, info);
998 }
999 if (info != NULL) add_debug_info_for_null_check(offset, info);
1000 }
1001
1002 void LIR_Assembler::stack2reg(LIR_Opr src, LIR_Opr dest, BasicType type) {
1003 assert(src->is_stack(), "should not call otherwise");
1004 assert(dest->is_register(), "should not call otherwise");
1005
1006 if (dest->is_single_cpu()) {
1007 if (is_reference_type(type)) {
1008 __ mem2reg_opt(dest->as_register(), frame_map()->address_for_slot(src->single_stack_ix()), true);
1009 __ verify_oop(dest->as_register(), FILE_AND_LINE);
1010 } else if (type == T_METADATA || type == T_ADDRESS) {
1011 __ mem2reg_opt(dest->as_register(), frame_map()->address_for_slot(src->single_stack_ix()), true);
1012 } else {
1013 __ mem2reg_opt(dest->as_register(), frame_map()->address_for_slot(src->single_stack_ix()), false);
1014 }
1015 } else if (dest->is_double_cpu()) {
1016 Address src_addr_LO = frame_map()->address_for_slot(src->double_stack_ix());
1017 __ mem2reg_opt(dest->as_register_lo(), src_addr_LO, true);
1018 } else if (dest->is_single_fpu()) {
1019 Address src_addr = frame_map()->address_for_slot(src->single_stack_ix());
1020 __ mem2freg_opt(dest->as_float_reg(), src_addr, false);
1021 } else if (dest->is_double_fpu()) {
1022 Address src_addr = frame_map()->address_for_slot(src->double_stack_ix());
1023 __ mem2freg_opt(dest->as_double_reg(), src_addr, true);
1024 } else {
1025 ShouldNotReachHere();
1026 }
1027 }
1028
1029 void LIR_Assembler::reg2stack(LIR_Opr src, LIR_Opr dest, BasicType type, bool pop_fpu_stack) {
1030 assert(src->is_register(), "should not call otherwise");
1031 assert(dest->is_stack(), "should not call otherwise");
1032
1033 if (src->is_single_cpu()) {
1034 const Address dst = frame_map()->address_for_slot(dest->single_stack_ix());
1035 if (is_reference_type(type)) {
1036 __ verify_oop(src->as_register(), FILE_AND_LINE);
1037 __ reg2mem_opt(src->as_register(), dst, true);
1038 } else if (type == T_METADATA || type == T_ADDRESS) {
1039 __ reg2mem_opt(src->as_register(), dst, true);
1040 } else {
1041 __ reg2mem_opt(src->as_register(), dst, false);
1042 }
1043 } else if (src->is_double_cpu()) {
1044 Address dstLO = frame_map()->address_for_slot(dest->double_stack_ix());
1045 __ reg2mem_opt(src->as_register_lo(), dstLO, true);
1046 } else if (src->is_single_fpu()) {
1047 Address dst_addr = frame_map()->address_for_slot(dest->single_stack_ix());
1048 __ freg2mem_opt(src->as_float_reg(), dst_addr, false);
1049 } else if (src->is_double_fpu()) {
1050 Address dst_addr = frame_map()->address_for_slot(dest->double_stack_ix());
1051 __ freg2mem_opt(src->as_double_reg(), dst_addr, true);
1052 } else {
1053 ShouldNotReachHere();
1054 }
1055 }
1056
1062 __ z_ldr(to_reg->as_double_reg(), from_reg->as_double_reg());
1063 } else {
1064 // float to float moves
1065 assert(to_reg->is_single_fpu(), "should match");
1066 __ z_ler(to_reg->as_float_reg(), from_reg->as_float_reg());
1067 }
1068 } else if (!from_reg->is_float_kind() && !to_reg->is_float_kind()) {
1069 if (from_reg->is_double_cpu()) {
1070 __ z_lgr(to_reg->as_pointer_register(), from_reg->as_pointer_register());
1071 } else if (to_reg->is_double_cpu()) {
1072 // int to int moves
1073 __ z_lgr(to_reg->as_register_lo(), from_reg->as_register());
1074 } else {
1075 // int to int moves
1076 __ z_lgr(to_reg->as_register(), from_reg->as_register());
1077 }
1078 } else {
1079 ShouldNotReachHere();
1080 }
1081 if (is_reference_type(to_reg->type())) {
1082 __ verify_oop(to_reg->as_register(), FILE_AND_LINE);
1083 }
1084 }
1085
1086 void LIR_Assembler::reg2mem(LIR_Opr from, LIR_Opr dest_opr, BasicType type,
1087 LIR_PatchCode patch_code, CodeEmitInfo* info, bool pop_fpu_stack,
1088 bool wide, bool unaligned) {
1089 assert(type != T_METADATA, "store of metadata ptr not supported");
1090 LIR_Address* addr = dest_opr->as_address_ptr();
1091
1092 Register dest = addr->base()->as_pointer_register();
1093 Register disp_reg = Z_R0;
1094 int disp_value = addr->disp();
1095 bool needs_patching = (patch_code != lir_patch_none);
1096
1097 if (addr->base()->is_oop_register()) {
1098 __ verify_oop(dest, FILE_AND_LINE);
1099 }
1100
1101 PatchingStub* patch = NULL;
1102 if (needs_patching) {
1103 patch = new PatchingStub(_masm, PatchingStub::access_field_id);
1104 assert(!from->is_double_cpu() ||
1105 patch_code == lir_patch_none ||
1106 patch_code == lir_patch_normal, "patching doesn't match register");
1107 }
1108
1109 assert(!needs_patching || (!Immediate::is_simm20(disp_value) && addr->index()->is_illegal()), "assumption");
1110 if (addr->index()->is_illegal()) {
1111 if (!Immediate::is_simm20(disp_value)) {
1112 if (needs_patching) {
1113 __ load_const(Z_R1_scratch, (intptr_t)0);
1114 } else {
1115 __ load_const_optimized(Z_R1_scratch, disp_value);
1116 }
1117 disp_reg = Z_R1_scratch;
1118 disp_value = 0;
1119 }
1120 } else {
1121 if (!Immediate::is_simm20(disp_value)) {
1122 __ load_const_optimized(Z_R1_scratch, disp_value);
1123 __ z_la(Z_R1_scratch, 0, Z_R1_scratch, addr->index()->as_register());
1124 disp_reg = Z_R1_scratch;
1125 disp_value = 0;
1126 }
1127 disp_reg = addr->index()->as_pointer_register();
1128 }
1129
1130 assert(disp_reg != Z_R0 || Immediate::is_simm20(disp_value), "should have set this up");
1131
1132 if (is_reference_type(type)) {
1133 __ verify_oop(from->as_register(), FILE_AND_LINE);
1134 }
1135
1136 bool short_disp = Immediate::is_uimm12(disp_value);
1137
1138 // Remember the offset of the store. The patching_epilog must be done
1139 // before the call to add_debug_info_for_null_check, otherwise the PcDescs don't get
1140 // entered in increasing order.
1141 int offset = code_offset();
1142 switch (type) {
1143 case T_BOOLEAN: // fall through
1144 case T_BYTE :
1145 if (short_disp) {
1146 __ z_stc(from->as_register(), disp_value, disp_reg, dest);
1147 } else {
1148 __ z_stcy(from->as_register(), disp_value, disp_reg, dest);
1149 }
1150 break;
1151 case T_CHAR : // fall through
1152 case T_SHORT :
1153 if (short_disp) {
2395 if (op->init_check()) {
2396 // Make sure klass is initialized & doesn't have finalizer.
2397 const int state_offset = in_bytes(InstanceKlass::init_state_offset());
2398 Register iklass = op->klass()->as_register();
2399 add_debug_info_for_null_check_here(op->stub()->info());
2400 if (Immediate::is_uimm12(state_offset)) {
2401 __ z_cli(state_offset, iklass, InstanceKlass::fully_initialized);
2402 } else {
2403 __ z_cliy(state_offset, iklass, InstanceKlass::fully_initialized);
2404 }
2405 __ branch_optimized(Assembler::bcondNotEqual, *op->stub()->entry()); // Use long branch, because slow_case might be far.
2406 }
2407 __ allocate_object(op->obj()->as_register(),
2408 op->tmp1()->as_register(),
2409 op->tmp2()->as_register(),
2410 op->header_size(),
2411 op->object_size(),
2412 op->klass()->as_register(),
2413 *op->stub()->entry());
2414 __ bind(*op->stub()->continuation());
2415 __ verify_oop(op->obj()->as_register(), FILE_AND_LINE);
2416 }
2417
2418 void LIR_Assembler::emit_alloc_array(LIR_OpAllocArray* op) {
2419 Register len = op->len()->as_register();
2420 __ move_reg_if_needed(len, T_LONG, len, T_INT); // sign extend
2421
2422 if (UseSlowPath ||
2423 (!UseFastNewObjectArray && (is_reference_type(op->type()))) ||
2424 (!UseFastNewTypeArray && (!is_reference_type(op->type())))) {
2425 __ z_brul(*op->stub()->entry());
2426 } else {
2427 __ allocate_array(op->obj()->as_register(),
2428 op->len()->as_register(),
2429 op->tmp1()->as_register(),
2430 op->tmp2()->as_register(),
2431 arrayOopDesc::header_size(op->type()),
2432 type2aelembytes(op->type()),
2433 op->klass()->as_register(),
2434 *op->stub()->entry());
2435 }
2531 __ or2mem_8(data_addr, header_bits);
2532 __ branch_optimized(Assembler::bcondAlways, *obj_is_null);
2533 __ bind(not_null);
2534 } else {
2535 __ compareU64_and_branch(obj, (intptr_t) 0, Assembler::bcondEqual, *obj_is_null);
2536 }
2537
2538 NearLabel profile_cast_failure, profile_cast_success;
2539 Label *failure_target = op->should_profile() ? &profile_cast_failure : failure;
2540 Label *success_target = op->should_profile() ? &profile_cast_success : success;
2541
2542 // Patching may screw with our temporaries on sparc,
2543 // so let's do it before loading the class.
2544 if (k->is_loaded()) {
2545 metadata2reg(k->constant_encoding(), k_RInfo);
2546 } else {
2547 klass2reg_with_patching(k_RInfo, op->info_for_patch());
2548 }
2549 assert(obj != k_RInfo, "must be different");
2550
2551 __ verify_oop(obj, FILE_AND_LINE);
2552
2553 // Get object class.
2554 // Not a safepoint as obj null check happens earlier.
2555 if (op->fast_check()) {
2556 if (UseCompressedClassPointers) {
2557 __ load_klass(klass_RInfo, obj);
2558 __ compareU64_and_branch(k_RInfo, klass_RInfo, Assembler::bcondNotEqual, *failure_target);
2559 } else {
2560 __ z_cg(k_RInfo, Address(obj, oopDesc::klass_offset_in_bytes()));
2561 __ branch_optimized(Assembler::bcondNotEqual, *failure_target);
2562 }
2563 // Successful cast, fall through to profile or jump.
2564 } else {
2565 bool need_slow_path = !k->is_loaded() ||
2566 ((int) k->super_check_offset() == in_bytes(Klass::secondary_super_cache_offset()));
2567 intptr_t super_check_offset = k->is_loaded() ? k->super_check_offset() : -1L;
2568 __ load_klass(klass_RInfo, obj);
2569 // Perform the fast part of the checking logic.
2570 __ check_klass_subtype_fast_path(klass_RInfo, k_RInfo, Rtmp1,
2571 (need_slow_path ? success_target : NULL),
2992
2993 void LIR_Assembler::emit_profile_type(LIR_OpProfileType* op) {
2994 Register obj = op->obj()->as_register();
2995 Register tmp1 = op->tmp()->as_pointer_register();
2996 Register tmp2 = Z_R1_scratch;
2997 Address mdo_addr = as_Address(op->mdp()->as_address_ptr());
2998 ciKlass* exact_klass = op->exact_klass();
2999 intptr_t current_klass = op->current_klass();
3000 bool not_null = op->not_null();
3001 bool no_conflict = op->no_conflict();
3002
3003 Label update, next, none, null_seen, init_klass;
3004
3005 bool do_null = !not_null;
3006 bool exact_klass_set = exact_klass != NULL && ciTypeEntries::valid_ciklass(current_klass) == exact_klass;
3007 bool do_update = !TypeEntries::is_type_unknown(current_klass) && !exact_klass_set;
3008
3009 assert(do_null || do_update, "why are we here?");
3010 assert(!TypeEntries::was_null_seen(current_klass) || do_update, "why are we here?");
3011
3012 __ verify_oop(obj, FILE_AND_LINE);
3013
3014 if (do_null || tmp1 != obj DEBUG_ONLY(|| true)) {
3015 __ z_ltgr(tmp1, obj);
3016 }
3017 if (do_null) {
3018 __ z_brnz(update);
3019 if (!TypeEntries::was_null_seen(current_klass)) {
3020 __ z_lg(tmp1, mdo_addr);
3021 __ z_oill(tmp1, TypeEntries::null_seen);
3022 __ z_stg(tmp1, mdo_addr);
3023 }
3024 if (do_update) {
3025 __ z_bru(next);
3026 }
3027 } else {
3028 __ asm_assert_ne("unexpect null obj", __LINE__);
3029 }
3030
3031 __ bind(update);
3032
|