src/cpu/x86/vm/stubGenerator_x86_64.cpp
    // rscratch1: r10
    const Register crc   = c_rarg0;  // crc
    const Register buf   = c_rarg1;  // source java byte array address
    const Register len   = c_rarg2;  // length
    const Register table = c_rarg3;  // crc_table address (reuse register)
    const Register tmp   = r11;
    assert_different_registers(crc, buf, len, table, tmp, rax);

    BLOCK_COMMENT("Entry:");
    __ enter(); // required for proper stackwalking of RuntimeStub frame

    __ kernel_crc32(crc, buf, len, table, tmp);

    __ movl(rax, crc);
    __ leave(); // required for proper stackwalking of RuntimeStub frame
    __ ret(0);

    return start;
  }

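  // Illustrative reference (not part of this change): the stub above
  // accelerates java.util.zip.CRC32, i.e. the standard reflected CRC-32
  // with polynomial 0xEDB88320. A minimal bit-at-a-time model of the value
  // kernel_crc32 produces; the complement steps are assumed to be handled
  // inside the kernel, so this self-contained sketch folds them in.
  static juint crc32_ref(juint crc, const jubyte* buf, int len) {
    crc = ~crc;                      // pre-invert the running state
    for (int i = 0; i < len; i++) {
      crc ^= buf[i];
      for (int b = 0; b < 8; b++) {
        crc = (crc >> 1) ^ ((crc & 1) ? 0xEDB88320u : 0u);
      }
    }
    return ~crc;                     // post-invert
  }
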
  /**
   *  Arguments:
   *
   *  Input:
   *    c_rarg0   - x address
   *    c_rarg1   - x length
   *    c_rarg2   - y address
   *    c_rarg3   - y length
   * not Win64
   *    c_rarg4   - z address
   *    c_rarg5   - z length
   * Win64
   *    rsp+40    - z address
   *    rsp+48    - z length
   */
  address generate_multiplyToLen() {
    __ align(CodeEntryAlignment);
    StubCodeMark mark(this, "StubRoutines", "multiplyToLen");

    address start = __ pc();
    // Win64: rcx, rdx, r8, r9 (c_rarg0, c_rarg1, ...)
    // Unix:  rdi, rsi, rdx, rcx, r8, r9 (c_rarg0, c_rarg1, ...)
    const Register x     = rdi;
    const Register xlen  = rax;
    const Register y     = rsi;
    const Register ylen  = rcx;
    const Register z     = r8;
    const Register zlen  = r11;

    // Next registers will be saved on stack in multiply_to_len().
    const Register tmp1  = r12;
    const Register tmp2  = r13;
    const Register tmp3  = r14;
    const Register tmp4  = r15;
    const Register tmp5  = rbx;

    BLOCK_COMMENT("Entry:");
    __ enter(); // required for proper stackwalking of RuntimeStub frame

#ifndef _WIN64
    __ movptr(zlen, r9); // Save r9 in r11 - zlen
#endif
    setup_arg_regs(4); // x => rdi, xlen => rsi, y => rdx
                       // ylen => rcx, z => r8, zlen => r11
                       // r9 and r10 may be used to save non-volatile registers
#ifdef _WIN64
    // last 2 arguments (#4, #5) are on stack on Win64
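    // (after enter() pushed rbp, the rsp+40 and rsp+48 entry offsets from
    // the header comment become rsp+48 and rsp+56, i.e. 6 and 7 words)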
    __ movptr(z, Address(rsp, 6 * wordSize));
    __ movptr(zlen, Address(rsp, 7 * wordSize));
#endif

    __ movptr(xlen, rsi);
    __ movptr(y,    rdx);
    __ multiply_to_len(x, xlen, y, ylen, z, zlen, tmp1, tmp2, tmp3, tmp4, tmp5);

    restore_arg_regs();

    __ leave(); // required for proper stackwalking of RuntimeStub frame
    __ ret(0);

    return start;
  }

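  // Illustrative reference (not part of this change): the grade-school
  // multiplication that MacroAssembler::multiply_to_len accelerates.
  // Operands are arrays of 32-bit limbs stored most-significant first, as
  // in java.math.BigInteger; the name and signature below are assumptions
  // of this sketch, not JDK declarations.
  static void multiply_to_len_ref(const juint* x, int xlen,
                                  const juint* y, int ylen,
                                  juint* z) {        // z has xlen + ylen limbs
    for (int k = 0; k < xlen + ylen; k++) z[k] = 0;
    for (int i = xlen - 1; i >= 0; i--) {            // least significant limb of x first
      julong carry = 0;
      for (int j = ylen - 1; j >= 0; j--) {          // least significant limb of y first
        int k = i + j + 1;                           // output column for x[i] * y[j]
        julong sum = (julong)x[i] * y[j] + z[k] + carry;
        z[k]  = (juint)sum;
        carry = sum >> 32;
      }
      z[i] = (juint)carry;                           // column i is first written here
    }
  }
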
#undef __
#define __ masm->

  // Continuation point for throwing of implicit exceptions that are
  // not handled in the current activation. Fabricates an exception
  // oop and initiates normal exception dispatching in this
  // frame. Since we need to preserve callee-saved values (currently
  // only for C2, but done for C1 as well) we need a callee-saved oop
  // map and therefore have to make these stubs into RuntimeStubs
  // rather than BufferBlobs.  If the compiler needs all registers to
  // be preserved between the fault point and the exception handler
  // then it must assume responsibility for that in
  // AbstractCompiler::continuation_for_implicit_null_exception or
  // continuation_for_implicit_division_by_zero_exception. All other
  // implicit exceptions (e.g., NullPointerException or
  // AbstractMethodError on entry) are either at call sites or
  // otherwise assume that stack unwinding will be initiated, so
  // caller saved registers were assumed volatile in the compiler.
  address generate_throw_exception(const char* name,
                                   address runtime_entry,


    generate_math_stubs();

    // don't bother generating these AES intrinsic stubs unless global flag is set
    if (UseAESIntrinsics) {
      StubRoutines::x86::_key_shuffle_mask_addr = generate_key_shuffle_mask();  // needed by the others

      StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
      StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock();
      StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt();
      StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt_Parallel();
    }

    // Safefetch stubs.
    generate_safefetch("SafeFetch32", sizeof(int),     &StubRoutines::_safefetch32_entry,
                                                       &StubRoutines::_safefetch32_fault_pc,
                                                       &StubRoutines::_safefetch32_continuation_pc);
    generate_safefetch("SafeFetchN", sizeof(intptr_t), &StubRoutines::_safefetchN_entry,
                                                       &StubRoutines::_safefetchN_fault_pc,
                                                       &StubRoutines::_safefetchN_continuation_pc);
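    // Usage sketch (the SafeFetch32/SafeFetchN wrappers are declared in
    // stubRoutines.hpp): a load through a possibly-unmapped address that
    // returns an error value instead of crashing the VM, e.g.
    //   int v = SafeFetch32(adr, -1);  // yields -1 if *adr is not readable;
    // on a fault, the handler resumes at the registered continuation PC.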
#ifdef COMPILER2
    if (UseMultiplyToLenIntrinsic) {
      StubRoutines::_multiplyToLen = generate_multiplyToLen();
    }
#endif
  }
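
  // Illustrative only (not part of this webrev): setup_arg_regs() above moves
  // the incoming C ABI arguments into the stub's working registers, so the
  // registered code can be modeled as an ordinary native function. A
  // hypothetical harness, assuming a StubRoutines::multiplyToLen() accessor
  // for the _multiplyToLen field set here:
  //
  //   typedef void (*multiplyToLen_fn)(jint* x, jint xlen, jint* y, jint ylen,
  //                                    jint* z, jint zlen);
  //   void call_multiply_stub(jint* x, jint xlen, jint* y, jint ylen,
  //                           jint* z, jint zlen) {
  //     multiplyToLen_fn f =
  //         CAST_TO_FN_PTR(multiplyToLen_fn, StubRoutines::multiplyToLen());
  //     if (f != NULL) f(x, xlen, y, ylen, z, zlen);  // writes product into z
  //   }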

 public:
  StubGenerator(CodeBuffer* code, bool all) : StubCodeGenerator(code) {
    if (all) {
      generate_all();
    } else {
      generate_initial();
    }
  }
}; // end class declaration

void StubGenerator_generate(CodeBuffer* code, bool all) {
  StubGenerator g(code, all);
}