
src/cpu/x86/vm/stubGenerator_x86_32.cpp


@@ -2091,29 +2091,10 @@
                                entry_oop_arraycopy,
                                entry_jlong_arraycopy,
                                entry_checkcast_arraycopy);
   }
 
-  void generate_math_stubs() {
-    {
-      StubCodeMark mark(this, "StubRoutines", "log10");
-      StubRoutines::_intrinsic_log10 = (double (*)(double)) __ pc();
-
-      __ fld_d(Address(rsp, 4));
-      __ flog10();
-      __ ret(0);
-    }
-    {
-      StubCodeMark mark(this, "StubRoutines", "tan");
-      StubRoutines::_intrinsic_tan = (double (*)(double)) __ pc();
-
-      __ fld_d(Address(rsp, 4));
-      __ trigfunc('t');
-      __ ret(0);
-    }
-  }
-
   // AES intrinsic stubs
   enum {AESBlockSize = 16};
 
   address generate_key_shuffle_mask() {
     __ align(16);

@@ -3532,10 +3513,35 @@
 
    return start;
 
  }
 
+ address generate_libmLog10() {
+   address start = __ pc();
+
+   const XMMRegister x0 = xmm0;
+   const XMMRegister x1 = xmm1;
+   const XMMRegister x2 = xmm2;
+   const XMMRegister x3 = xmm3;
+
+   const XMMRegister x4 = xmm4;
+   const XMMRegister x5 = xmm5;
+   const XMMRegister x6 = xmm6;
+   const XMMRegister x7 = xmm7;
+
+   const Register tmp = rbx;
+
+   BLOCK_COMMENT("Entry:");
+   __ enter(); // required for proper stackwalking of RuntimeStub frame
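+   // x0-x7 and rax/rcx/rdx/rbx are handed to MacroAssembler::fast_log10
+   // as working registers for the log10 computation.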
+   __ fast_log10(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
+   __ leave(); // required for proper stackwalking of RuntimeStub frame
+   __ ret(0);
+
+   return start;
+
+ }
+
  address generate_libmPow() {
    address start = __ pc();
 
    const XMMRegister x0 = xmm0;
    const XMMRegister x1 = xmm1;

@@ -3628,10 +3634,48 @@
 
    return start;
 
  }
 
+ address generate_libm_tan_cot_huge() {
+   address start = __ pc();
+
+   const XMMRegister x0 = xmm0;
+   const XMMRegister x1 = xmm1;
+
+   BLOCK_COMMENT("Entry:");
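+   // Note: unlike the other libm stubs, no enter()/leave()/ret(0) pair is
+   // emitted here; libm_tancot_huge lays down its own frame handling and
+   // return, like the sin/cos huge-argument helper.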
+   __ libm_tancot_huge(x0, x1, rax, rcx, rdx, rbx, rsi, rdi, rbp, rsp);
+
+   return start;
+
+ }
+
+ address generate_libmTan() {
+   address start = __ pc();
+
+   const XMMRegister x0 = xmm0;
+   const XMMRegister x1 = xmm1;
+   const XMMRegister x2 = xmm2;
+   const XMMRegister x3 = xmm3;
+
+   const XMMRegister x4 = xmm4;
+   const XMMRegister x5 = xmm5;
+   const XMMRegister x6 = xmm6;
+   const XMMRegister x7 = xmm7;
+
+   const Register tmp = rbx;
+
+   BLOCK_COMMENT("Entry:");
+   __ enter(); // required for proper stackwalking of RuntimeStub frame
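+   // fast_tan consumes x0-x7 plus rax/rcx/rdx/rbx; its huge-argument path
+   // relies on the _dlibm_reduce_pi04l and _dlibm_tan_cot_huge stubs, which
+   // are generated before this one (see the registration code further down).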
+   __ fast_tan(x0, x1, x2, x3, x4, x5, x6, x7, rax, rcx, rdx, tmp);
+   __ leave(); // required for proper stackwalking of RuntimeStub frame
+   __ ret(0);
+
+   return start;
+
+ }
+
   // Safefetch stubs.
   void generate_safefetch(const char* name, int size, address* entry,
                           address* fault_pc, address* continuation_pc) {
     // safefetch signatures:
     //   int      SafeFetch32(int*      adr, int      errValue);

@@ -3852,26 +3896,43 @@
       StubRoutines::x86::generate_CRC32C_table(supports_clmul);
       StubRoutines::_crc32c_table_addr = (address)StubRoutines::x86::_crc32c_table;
       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C(supports_clmul);
     }
     if (VM_Version::supports_sse2()) {
+      if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dexp)) {
-      StubRoutines::_dexp = generate_libmExp();
+        StubRoutines::_dexp = generate_libmExp();
+      }
+      if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
-      StubRoutines::_dlog = generate_libmLog();
+        StubRoutines::_dlog = generate_libmLog();
+      }
+      if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog10)) {
+        StubRoutines::_dlog10 = generate_libmLog10();
+      }
+      if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dpow)) {
-      StubRoutines::_dpow = generate_libmPow();
+        StubRoutines::_dpow = generate_libmPow();
-      if (UseLibmSinIntrinsic || UseLibmCosIntrinsic) {
+      }
+      if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin) ||
+          vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos) ||
+          vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dtan)) {
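+        // The pi/4 argument-reduction stub is shared by the sin, cos and
+        // tan intrinsics, so generate it if any of the three is available.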
         StubRoutines::_dlibm_reduce_pi04l = generate_libm_reduce_pi04l();
+      }
+      if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin) ||
+          vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
         StubRoutines::_dlibm_sin_cos_huge = generate_libm_sin_cos_huge();
       }
-      if (UseLibmSinIntrinsic) {
+      if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
         StubRoutines::_dsin = generate_libmSin();
       }
-      if (UseLibmCosIntrinsic) {
+      if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
         StubRoutines::_dcos = generate_libmCos();
       }
+      if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dtan)) {
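+        // Generate the huge-argument helper first; fast_tan is expected to
+        // reference _dlibm_tan_cot_huge when the tan stub is emitted.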
+        StubRoutines::_dlibm_tan_cot_huge = generate_libm_tan_cot_huge();
+        StubRoutines::_dtan = generate_libmTan();
+      }
     }
   }
-
 
   void generate_all() {
     // Generates all stubs and initializes the entry points
 
     // These entry points require SharedInfo::stack0 to be set up in non-core builds

@@ -3887,12 +3948,10 @@
     StubRoutines::_verify_oop_subroutine_entry     = generate_verify_oop();
 
     // arraycopy stubs used by compilers
     generate_arraycopy_stubs();
 
-    generate_math_stubs();
-
     // don't bother generating these AES intrinsic stubs unless global flag is set
     if (UseAESIntrinsics) {
       StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask();  // might be needed by the others
 
       StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();