
src/hotspot/cpu/s390/macroAssembler_s390.cpp

rev 51624 : [mq]: 8210319.patch

@@ -463,23 +463,12 @@
     z_xilf(r1, -1);
     if (wide) {
       z_xihf(r1, -1);
     }
   } else { // Distinct src and dst registers.
-    if (VM_Version::has_DistinctOpnds()) {
       load_const_optimized(r1, -1);
-      z_xgrk(r1, r2, r1);
-    } else {
-      if (wide) {
-        z_lgr(r1, r2);
-        z_xilf(r1, -1);
-        z_xihf(r1, -1);
-      } else {
-        z_lr(r1, r2);
-        z_xilf(r1, -1);
-      }
-    }
+    z_xgr(r1, r2);
   }
 }
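
The simplified branch relies on the identity ~x == x ^ -1: loading an all-ones constant into the destination and XOR-ing in the source yields the bitwise complement, whether or not the distinct-operands facility is available. A minimal plain-C++ sketch of that identity (the variable names only mirror r1/r2 above; this is not s390 assembly):

  #include <cassert>
  #include <cstdint>

  int main() {
    uint64_t r2 = 0x00000000CAFEBABEULL; // arbitrary example source value
    uint64_t r1 = ~0ULL;                 // load_const_optimized(r1, -1)
    r1 ^= r2;                            // z_xgr(r1, r2)
    assert(r1 == ~r2);                   // same result as a direct complement
    return 0;
  }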
 
 unsigned long MacroAssembler::create_mask(int lBitPos, int rBitPos) {
   assert(lBitPos >=  0,      "zero is  leftmost bit position");

@@ -1156,12 +1145,14 @@
 // Load a 64bit constant.
 // Patchable code sequence, but not atomically patchable.
 // Make sure to keep code size constant -> no value-dependent optimizations.
 // Do not kill condition code.
 void MacroAssembler::load_const(Register t, long x) {
-  Assembler::z_iihf(t, (int)(x >> 32));
-  Assembler::z_iilf(t, (int)(x & 0xffffffff));
+  // Note: Right shift is only cleanly defined for unsigned types
+  //       or for signed types with nonnegative values.
+  Assembler::z_iihf(t, (long)((unsigned long)x >> 32));
+  Assembler::z_iilf(t, (long)((unsigned long)x & 0xffffffffUL));
 }
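
For the load_const change above: right-shifting a negative signed value is implementation-defined in C++ (arithmetic shift on most compilers, but not guaranteed), so the patch casts to unsigned long to get a well-defined logical shift before the two halves are handed to z_iihf/z_iilf. A hedged sketch of the intended split, assuming 64-bit long and two's-complement narrowing conversions as on the s390 Linux targets (the constant is just an example):

  #include <cassert>
  #include <cstdint>

  int main() {
    long x = (long)0xDEADBEEFCAFEBABEULL;            // example negative 64-bit value
    int hi = (int)((unsigned long)x >> 32);          // high half via logical shift
    int lo = (int)((unsigned long)x & 0xffffffffUL); // low half
    assert((uint32_t)hi == 0xDEADBEEFu);
    assert((uint32_t)lo == 0xCAFEBABEu);
    return 0;
  }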
 
 // Load a 32bit constant into a 64bit register, sign-extend or zero-extend.
 // Patchable code sequence, but not atomically patchable.
 // Make sure to keep code size constant -> no value-dependent optimizations.

@@ -1254,12 +1245,14 @@
 //-----------------------------------
 
 // CPU-version dependent patching of load_const.
 void MacroAssembler::patch_const(address a, long x) {
   assert(is_load_const(a), "not a load of a constant");
-  set_imm32((address)a, (int) ((x >> 32) & 0xffffffff));
-  set_imm32((address)(a + 6), (int)(x & 0xffffffff));
+  // Note: Right shift is only cleanly defined for unsigned types
+  //       or for signed types with nonnegative values.
+  set_imm32((address)a, (long)((unsigned long)x >> 32));
+  set_imm32((address)(a + 6), (long)((unsigned long)x & 0xffffffffUL));
 }
 
 // Patching the value of CPU version dependent load_const_32to64 sequence.
 // The passed ptr MUST be in compressed format!
 int MacroAssembler::patch_load_const_32to64(address pos, int64_t np) {

@@ -1459,46 +1452,50 @@
     return 4;
   }
 
   // 64 bit value: | part1 | part2 | part3 | part4 |
   // At least one part is not zero!
-  int part1 = ((x >> 32) & 0xffff0000) >> 16;
-  int part2 = (x >> 32) & 0x0000ffff;
-  int part3 = (x & 0xffff0000) >> 16;
-  int part4 = (x & 0x0000ffff);
+  // Note: Right shift is only cleanly defined for unsigned types
+  //       or for signed types with nonnegative values.
+  int part1 = (int)((unsigned long)x >> 48) & 0x0000ffff;
+  int part2 = (int)((unsigned long)x >> 32) & 0x0000ffff;
+  int part3 = (int)((unsigned long)x >> 16) & 0x0000ffff;
+  int part4 = (int)x & 0x0000ffff;
+  int part12 = (int)((unsigned long)x >> 32);
+  int part34 = (int)x;
 
   // Lower word only (unsigned).
-  if ((part1 == 0) && (part2 == 0)) {
+  if (part12 == 0) {
     if (part3 == 0) {
       if (emit) z_llill(t, part4);
       return 4;
     }
     if (part4 == 0) {
       if (emit) z_llilh(t, part3);
       return 4;
     }
-    if (emit) z_llilf(t, (int)(x & 0xffffffff));
+    if (emit) z_llilf(t, part34);
     return 6;
   }
 
   // Upper word only.
-  if ((part3 == 0) && (part4 == 0)) {
+  if (part34 == 0) {
     if (part1 == 0) {
       if (emit) z_llihl(t, part2);
       return 4;
     }
     if (part2 == 0) {
       if (emit) z_llihh(t, part1);
       return 4;
     }
-    if (emit) z_llihf(t, (int)(x >> 32));
+    if (emit) z_llihf(t, part12);
     return 6;
   }
 
   // Lower word only (signed).
   if ((part1 == 0x0000ffff) && (part2 == 0x0000ffff) && ((part3 & 0x00008000) != 0)) {
-    if (emit) z_lgfi(t, (int)(x & 0xffffffff));
+    if (emit) z_lgfi(t, part34);
     return 6;
   }
 
   int len = 0;
 

@@ -1509,11 +1506,11 @@
     } else {
       if (emit) z_llihh(t, part1);
       len += 4;
     }
   } else {
-    if (emit) z_llihf(t, (int)(x >> 32));
+    if (emit) z_llihf(t, part12);
     len += 6;
   }
 
   if ((part3 == 0) || (part4 == 0)) {
     if (part3 == 0) {

@@ -1522,11 +1519,11 @@
     } else {
       if (emit) z_iilh(t, part3);
       len += 4;
     }
   } else {
-    if (emit) z_iilf(t, (int)(x & 0xffffffff));
+    if (emit) z_iilf(t, part34);
     len += 6;
   }
   return len;
 }
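
The length accumulated in len, together with the emit flag visible above, suggests a measure-then-emit usage: called with emit == false the routine only reports how many bytes the sequence would take, called with emit == true it also issues the instructions. A hedged sketch of that pattern (encode_value and its flat 4-byte cost model are purely illustrative, not JDK API):

  #include <cstdio>

  // Illustrative stand-in: size or emit a sequence, one "instruction" per
  // non-zero 16-bit part, each assumed to encode in 4 bytes.
  static int encode_value(long x, bool emit) {
    int len = 0;
    for (int shift = 48; shift >= 0; shift -= 16) {
      int part = (int)((unsigned long)x >> shift) & 0xffff;
      if (part != 0) {
        if (emit) printf("emit part 0x%04x\n", part);
        len += 4;
      }
    }
    return len;
  }

  int main() {
    long x = 0x0000123400005678L;
    int size = encode_value(x, false); // first pass: size only
    encode_value(x, true);             // second pass: emit for real
    printf("sequence length: %d bytes\n", size);
    return 0;
  }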
 