5141 }
5142
// BigInteger.mulAdd() intrinsic stub.
5143 if (UseMulAddIntrinsic) {
5144 StubRoutines::_mulAdd = generate_mulAdd();
5145 }
5146
// Montgomery multiplication stub (BigInteger modular-arithmetic fast path).
// The StubCodeMark names the stub for the code cache / debugging output.
5147 if (UseMontgomeryMultiplyIntrinsic) {
5148 StubCodeMark mark(this, "StubRoutines", "montgomeryMultiply");
5149 MontgomeryMultiplyGenerator g(_masm, /*squaring*/false);
5150 StubRoutines::_montgomeryMultiply = g.generate_multiply();
5151 }
5152
// Montgomery squaring stub; deliberately reuses the multiply code path
// (see the in-block comment below for the rationale).
5153 if (UseMontgomerySquareIntrinsic) {
5154 StubCodeMark mark(this, "StubRoutines", "montgomerySquare");
5155 MontgomeryMultiplyGenerator g(_masm, /*squaring*/true);
5156 // We use generate_multiply() rather than generate_square()
5157 // because it's faster for the sizes of modulus we care about.
5158 StubRoutines::_montgomerySquare = g.generate_multiply();
5159 }
5160
// Shenandoah GC write-barrier stubs: an aarch64-local variant and the
// compiler-facing _shenandoah_wb_C variant. The second argument of
// generate_shenandoah_wb() appears to toggle an in-stub cset test,
// suppressed when the IR already emits it
// (ShenandoahWriteBarrierCsetTestInIR) -- TODO confirm against the
// generator. NOTE(review): a duplicate of this hunk later in the file
// guards this block with ShenandoahWriteBarrier alone (no
// ShenandoahStoreValWriteBarrier) -- verify which condition is intended.
5161 if (UseShenandoahGC && (ShenandoahWriteBarrier || ShenandoahStoreValWriteBarrier)) {
5162 StubRoutines::aarch64::_shenandoah_wb = generate_shenandoah_wb(false, true);
5163 StubRoutines::_shenandoah_wb_C = generate_shenandoah_wb(true, !ShenandoahWriteBarrierCsetTestInIR);
5164 }
5165
// The crypto stubs below are skipped when building for the AArch64
// builtin simulator.
5166 #ifndef BUILTIN_SIM
5167 // generate GHASH intrinsics code
5168 if (UseGHASHIntrinsics) {
5169 StubRoutines::_ghash_processBlocks = generate_ghash_processBlocks();
5170 }
5171
// AES stubs: single-block encrypt/decrypt plus CBC-mode variants.
5172 if (UseAESIntrinsics) {
5173 StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
5174 StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock();
5175 StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt();
5176 StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt();
5177 }
5178
// SHA-1 compress stubs: the boolean selects the "MB" (multi-block)
// variant, judging by the stub names -- confirm against
// generate_sha1_implCompress().
5179 if (UseSHA1Intrinsics) {
5180 StubRoutines::_sha1_implCompress = generate_sha1_implCompress(false, "sha1_implCompress");
5181 StubRoutines::_sha1_implCompressMB = generate_sha1_implCompress(true, "sha1_implCompressMB");
|
5141 }
5142
// BigInteger.mulAdd() intrinsic stub.
5143 if (UseMulAddIntrinsic) {
5144 StubRoutines::_mulAdd = generate_mulAdd();
5145 }
5146
// Montgomery multiplication stub (BigInteger modular-arithmetic fast path).
// The StubCodeMark names the stub for the code cache / debugging output.
5147 if (UseMontgomeryMultiplyIntrinsic) {
5148 StubCodeMark mark(this, "StubRoutines", "montgomeryMultiply");
5149 MontgomeryMultiplyGenerator g(_masm, /*squaring*/false);
5150 StubRoutines::_montgomeryMultiply = g.generate_multiply();
5151 }
5152
// Montgomery squaring stub; deliberately reuses the multiply code path
// (see the in-block comment below for the rationale).
5153 if (UseMontgomerySquareIntrinsic) {
5154 StubCodeMark mark(this, "StubRoutines", "montgomerySquare");
5155 MontgomeryMultiplyGenerator g(_masm, /*squaring*/true);
5156 // We use generate_multiply() rather than generate_square()
5157 // because it's faster for the sizes of modulus we care about.
5158 StubRoutines::_montgomerySquare = g.generate_multiply();
5159 }
5160
// Shenandoah GC write-barrier stubs: an aarch64-local variant and the
// compiler-facing _shenandoah_wb_C variant. The second argument of
// generate_shenandoah_wb() appears to toggle an in-stub cset test,
// suppressed when the IR already emits it
// (ShenandoahWriteBarrierCsetTestInIR) -- TODO confirm against the
// generator. NOTE(review): an earlier duplicate of this hunk in the
// file additionally tests ShenandoahStoreValWriteBarrier in this
// condition -- verify which guard is intended.
5161 if (UseShenandoahGC && ShenandoahWriteBarrier) {
5162 StubRoutines::aarch64::_shenandoah_wb = generate_shenandoah_wb(false, true);
5163 StubRoutines::_shenandoah_wb_C = generate_shenandoah_wb(true, !ShenandoahWriteBarrierCsetTestInIR);
5164 }
5165
// The crypto stubs below are skipped when building for the AArch64
// builtin simulator.
5166 #ifndef BUILTIN_SIM
5167 // generate GHASH intrinsics code
5168 if (UseGHASHIntrinsics) {
5169 StubRoutines::_ghash_processBlocks = generate_ghash_processBlocks();
5170 }
5171
// AES stubs: single-block encrypt/decrypt plus CBC-mode variants.
5172 if (UseAESIntrinsics) {
5173 StubRoutines::_aescrypt_encryptBlock = generate_aescrypt_encryptBlock();
5174 StubRoutines::_aescrypt_decryptBlock = generate_aescrypt_decryptBlock();
5175 StubRoutines::_cipherBlockChaining_encryptAESCrypt = generate_cipherBlockChaining_encryptAESCrypt();
5176 StubRoutines::_cipherBlockChaining_decryptAESCrypt = generate_cipherBlockChaining_decryptAESCrypt();
5177 }
5178
// SHA-1 compress stubs: the boolean selects the "MB" (multi-block)
// variant, judging by the stub names -- confirm against
// generate_sha1_implCompress().
5179 if (UseSHA1Intrinsics) {
5180 StubRoutines::_sha1_implCompress = generate_sha1_implCompress(false, "sha1_implCompress");
5181 StubRoutines::_sha1_implCompressMB = generate_sha1_implCompress(true, "sha1_implCompressMB");
|