787 __ stmxcsr(mxcsr_save);
788 __ movl(rax, mxcsr_save);
789 __ andl(rax, MXCSR_MASK); // Only check control and mask bits
790 __ cmp32(rax, mxcsr_std);
791 __ jcc(Assembler::equal, ok_ret);
792
793 __ warn("MXCSR changed by native JNI code, use -XX:+RestoreMXCSROnJNICall");
794
795 __ ldmxcsr(mxcsr_std);
796
797 __ bind(ok_ret);
798 __ addptr(rsp, wordSize);
799 __ pop(rax);
800 }
801
802 __ ret(0);
803
804 return start;
805 }
806
807 address generate_f2i_fixup() {
808 StubCodeMark mark(this, "StubRoutines", "f2i_fixup");
809 Address inout(rsp, 5 * wordSize); // return address + 4 saves
810
811 address start = __ pc();
812
813 Label L;
814
815 __ push(rax);
816 __ push(c_rarg3);
817 __ push(c_rarg2);
818 __ push(c_rarg1);
819
820 __ movl(rax, 0x7f800000);
821 __ xorl(c_rarg3, c_rarg3);
822 __ movl(c_rarg2, inout);
823 __ movl(c_rarg1, c_rarg2);
824 __ andl(c_rarg1, 0x7fffffff);
825 __ cmpl(rax, c_rarg1); // NaN? -> 0
826 __ jcc(Assembler::negative, L);
1217 const Register saved_rdi = r9;
1218 const Register saved_rsi = r10;
1219 #ifdef _WIN64
1220 __ movptr(rdi, saved_rdi);
1221 __ movptr(rsi, saved_rsi);
1222 #endif
1223 }
1224
1225 // Generate code for an array write pre barrier
1226 //
1227 // addr - starting address
1228 // count - element count
1229 // dest_uninitialized - whether the destination is statically known to hold no live oops
1230 //
1231 // Destroy no registers!
1232 //
1233 void gen_write_ref_array_pre_barrier(Register addr, Register count, bool dest_uninitialized) {
// Emit the GC pre-barrier for a bulk oop-array store. addr/count name the
// destination start and element count; dest_uninitialized means the caller
// statically knows the destination holds no previous oops, so SATB logging
// of the old values can be skipped entirely.
1234 BarrierSet* bs = Universe::heap()->barrier_set();
1235 switch (bs->kind()) {
1236 case BarrierSet::G1SATBCTLogging:
1237 // With G1, don't generate the call if we statically know that the target is uninitialized
1238 if (!dest_uninitialized) {
1239 __ pusha(); // push registers
// Move addr/count into the first two C calling-convention argument
// registers, taking care when they already alias c_rarg0/c_rarg1.
1240 if (count == c_rarg0) {
1241 if (addr == c_rarg1) {
1242 // exactly backwards!!
1243 __ xchgptr(c_rarg1, c_rarg0);
1244 } else {
// count currently sits in c_rarg0, so copy it out first.
1245 __ movptr(c_rarg1, count);
1246 __ movptr(c_rarg0, addr);
1247 }
1248 } else {
1249 __ movptr(c_rarg0, addr);
1250 __ movptr(c_rarg1, count);
1251 }
1252 __ call_VM_leaf(CAST_FROM_FN_PTR(address, BarrierSet::static_write_ref_array_pre), 2);
1253 __ popa();
1254 }
1255 break;
// NOTE(review): lines between the CardTableForRS label and
// ShouldNotReachHere() are elided in this chunk — presumably the
// card-table cases (which need no pre-barrier) break out before the
// catch-all; confirm against the full file.
1256 case BarrierSet::CardTableForRS:
1261 ShouldNotReachHere();
1262
1263 }
1264 }
1265
1266 //
1267 // Generate code for an array write post barrier
1268 //
1269 // Input:
1270 // start - register containing starting address of destination array
1271 // count - elements count
1272 // scratch - scratch register
1273 //
1274 // The input registers are overwritten.
1275 //
1276 void gen_write_ref_array_post_barrier(Register start, Register count, Register scratch) {
1277 assert_different_registers(start, count, scratch);
1278 BarrierSet* bs = Universe::heap()->barrier_set();
1279 switch (bs->kind()) {
1280 case BarrierSet::G1SATBCTLogging:
1281 {
1282 __ pusha(); // push registers (overkill)
1283 if (c_rarg0 == count) { // On win64 c_rarg0 == rcx
1284 assert_different_registers(c_rarg1, start);
1285 __ mov(c_rarg1, count);
1286 __ mov(c_rarg0, start);
1287 } else {
1288 assert_different_registers(c_rarg0, count);
1289 __ mov(c_rarg0, start);
1290 __ mov(c_rarg1, count);
1291 }
1292 __ call_VM_leaf(CAST_FROM_FN_PTR(address, BarrierSet::static_write_ref_array_post), 2);
1293 __ popa();
1294 }
1295 break;
1296 case BarrierSet::CardTableForRS:
1297 case BarrierSet::CardTableExtension:
1298 {
1299 CardTableModRefBS* ct = barrier_set_cast<CardTableModRefBS>(bs);
1300 assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
4258 // fabricate a RuntimeStub internally.
4259 StubRoutines::_throw_AbstractMethodError_entry =
4260 generate_throw_exception("AbstractMethodError throw_exception",
4261 CAST_FROM_FN_PTR(address,
4262 SharedRuntime::
4263 throw_AbstractMethodError));
4264
4265 StubRoutines::_throw_IncompatibleClassChangeError_entry =
4266 generate_throw_exception("IncompatibleClassChangeError throw_exception",
4267 CAST_FROM_FN_PTR(address,
4268 SharedRuntime::
4269 throw_IncompatibleClassChangeError));
4270
4271 StubRoutines::_throw_NullPointerException_at_call_entry =
4272 generate_throw_exception("NullPointerException at call throw_exception",
4273 CAST_FROM_FN_PTR(address,
4274 SharedRuntime::
4275 throw_NullPointerException_at_call));
4276
4277 // entry points that are platform specific
4278 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
4279 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();
4280 StubRoutines::x86::_d2i_fixup = generate_d2i_fixup();
4281 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup();
4282
4283 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF);
4284 StubRoutines::x86::_float_sign_flip = generate_fp_mask("float_sign_flip", 0x8000000080000000);
4285 StubRoutines::x86::_double_sign_mask = generate_fp_mask("double_sign_mask", 0x7FFFFFFFFFFFFFFF);
4286 StubRoutines::x86::_double_sign_flip = generate_fp_mask("double_sign_flip", 0x8000000000000000);
4287
4288 // support for verify_oop (must happen after universe_init)
4289 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
4290
4291 // arraycopy stubs used by compilers
4292 generate_arraycopy_stubs();
4293
4294 generate_math_stubs();
4295
4296 // don't bother generating these AES intrinsic stubs unless global flag is set
4297 if (UseAESIntrinsics) {
|
787 __ stmxcsr(mxcsr_save);
788 __ movl(rax, mxcsr_save);
789 __ andl(rax, MXCSR_MASK); // Only check control and mask bits
790 __ cmp32(rax, mxcsr_std);
791 __ jcc(Assembler::equal, ok_ret);
792
793 __ warn("MXCSR changed by native JNI code, use -XX:+RestoreMXCSROnJNICall");
794
795 __ ldmxcsr(mxcsr_std);
796
797 __ bind(ok_ret);
798 __ addptr(rsp, wordSize);
799 __ pop(rax);
800 }
801
802 __ ret(0);
803
804 return start;
805 }
806
807 address generate_shenandoah_wb() {
// Shenandoah write-barrier stub. The oop arrives in rax; the (possibly
// forwarded) oop is left in rax on return — the slow path's
// call_VM_leaf result register is the one GPR deliberately not restored.
// Fast path (object not in the collection set) falls straight through.
// NOTE(review): rdi is clobbered on BOTH paths (only rbx is preserved on
// the fast path) — callers presumably treat rdi as killed; confirm
// against the stub's declared kill set.
808 StubCodeMark mark(this, "StubRoutines", "shenandoah_wb");
809 address start = __ pc();
810
811 Label done;
812
813 __ push(rbx);
814 // Check for object being in the collection set.
815 // TODO: Can we use only 1 register here?
// Index the in-cset byte table by heap-region number
// (address >> RegionSizeShift); a nonzero byte means "in cset".
816 __ movptr(rdi, rax);
817 __ shrptr(rdi, ShenandoahHeapRegion::RegionSizeShift);
818 __ movptr(rbx, (intptr_t) ShenandoahHeap::in_cset_fast_test_addr());
819 __ movbool(rbx, Address(rbx, rdi, Address::times_1));
820 __ testbool(rbx);
821 __ jcc(Assembler::zero, done);
822
// Slow path: save every caller-visible register except rax (result)
// and rbx (already saved above), including all 16 xmm registers,
// then call into the runtime to resolve/copy the object.
823 __ push(rcx);
824 __ push(rdx);
825 __ push(rdi);
826 __ push(rsi);
827 __ push(r8);
828 __ push(r9);
829 __ push(r10);
830 __ push(r11);
831 __ push(r12);
832 __ push(r13);
833 __ push(r14);
834 __ push(r15);
// 16 xmm regs * 8 bytes = 128 bytes of spill space.
835 __ subptr(rsp, 128);
836 __ movdbl(Address(rsp, 0), xmm0);
837 __ movdbl(Address(rsp, 8), xmm1);
838 __ movdbl(Address(rsp, 16), xmm2);
839 __ movdbl(Address(rsp, 24), xmm3);
840 __ movdbl(Address(rsp, 32), xmm4);
841 __ movdbl(Address(rsp, 40), xmm5);
842 __ movdbl(Address(rsp, 48), xmm6);
843 __ movdbl(Address(rsp, 56), xmm7);
844 __ movdbl(Address(rsp, 64), xmm8);
845 __ movdbl(Address(rsp, 72), xmm9);
846 __ movdbl(Address(rsp, 80), xmm10);
847 __ movdbl(Address(rsp, 88), xmm11);
848 __ movdbl(Address(rsp, 96), xmm12);
849 __ movdbl(Address(rsp, 104), xmm13);
850 __ movdbl(Address(rsp, 112), xmm14);
851 __ movdbl(Address(rsp, 120), xmm15);
// Pass the oop as the first C argument; result comes back in rax.
852 __ movptr(rdi, rax);
853 __ call_VM_leaf(CAST_FROM_FN_PTR(address, ShenandoahBarrierSet::resolve_and_maybe_copy_oop_c2), rdi);
// Restore everything in exact reverse order of the saves above.
854 __ movdbl(xmm0, Address(rsp, 0));
855 __ movdbl(xmm1, Address(rsp, 8));
856 __ movdbl(xmm2, Address(rsp, 16));
857 __ movdbl(xmm3, Address(rsp, 24));
858 __ movdbl(xmm4, Address(rsp, 32));
859 __ movdbl(xmm5, Address(rsp, 40));
860 __ movdbl(xmm6, Address(rsp, 48));
861 __ movdbl(xmm7, Address(rsp, 56));
862 __ movdbl(xmm8, Address(rsp, 64));
863 __ movdbl(xmm9, Address(rsp, 72));
864 __ movdbl(xmm10, Address(rsp, 80));
865 __ movdbl(xmm11, Address(rsp, 88));
866 __ movdbl(xmm12, Address(rsp, 96));
867 __ movdbl(xmm13, Address(rsp, 104));
868 __ movdbl(xmm14, Address(rsp, 112));
869 __ movdbl(xmm15, Address(rsp, 120));
870 __ addptr(rsp, 128);
871 __ pop(r15);
872 __ pop(r14);
873 __ pop(r13);
874 __ pop(r12);
875 __ pop(r11);
876 __ pop(r10);
877 __ pop(r9);
878 __ pop(r8);
879 __ pop(rsi);
880 __ pop(rdi);
881 __ pop(rdx);
882 __ pop(rcx);
883
884 __ bind(done);
885
886 __ pop(rbx);
887
888 __ ret(0);
889
890 return start;
891 }
892
893 address generate_f2i_fixup() {
894 StubCodeMark mark(this, "StubRoutines", "f2i_fixup");
895 Address inout(rsp, 5 * wordSize); // return address + 4 saves
896
897 address start = __ pc();
898
899 Label L;
900
901 __ push(rax);
902 __ push(c_rarg3);
903 __ push(c_rarg2);
904 __ push(c_rarg1);
905
906 __ movl(rax, 0x7f800000);
907 __ xorl(c_rarg3, c_rarg3);
908 __ movl(c_rarg2, inout);
909 __ movl(c_rarg1, c_rarg2);
910 __ andl(c_rarg1, 0x7fffffff);
911 __ cmpl(rax, c_rarg1); // NaN? -> 0
912 __ jcc(Assembler::negative, L);
1303 const Register saved_rdi = r9;
1304 const Register saved_rsi = r10;
1305 #ifdef _WIN64
1306 __ movptr(rdi, saved_rdi);
1307 __ movptr(rsi, saved_rsi);
1308 #endif
1309 }
1310
1311 // Generate code for an array write pre barrier
1312 //
1313 // addr - starting address
1314 // count - element count
1315 // dest_uninitialized - whether the destination is statically known to hold no live oops
1316 //
1317 // Destroy no registers!
1318 //
1319 void gen_write_ref_array_pre_barrier(Register addr, Register count, bool dest_uninitialized) {
// Emit the GC pre-barrier for a bulk oop-array store. addr/count name the
// destination start and element count; dest_uninitialized means the caller
// statically knows the destination holds no previous oops, so SATB logging
// of the old values can be skipped entirely. Shenandoah shares the same
// SATB pre-barrier path as G1 below.
1320 BarrierSet* bs = Universe::heap()->barrier_set();
1321 switch (bs->kind()) {
1322 case BarrierSet::G1SATBCTLogging:
1323 case BarrierSet::ShenandoahBarrierSet:
1324 // With G1 (and Shenandoah), don't generate the call if we statically know that the target is uninitialized
1325 if (!dest_uninitialized) {
1326 __ pusha(); // push registers
// Move addr/count into the first two C calling-convention argument
// registers, taking care when they already alias c_rarg0/c_rarg1.
1327 if (count == c_rarg0) {
1328 if (addr == c_rarg1) {
1329 // exactly backwards!!
1330 __ xchgptr(c_rarg1, c_rarg0);
1331 } else {
// count currently sits in c_rarg0, so copy it out first.
1332 __ movptr(c_rarg1, count);
1333 __ movptr(c_rarg0, addr);
1334 }
1335 } else {
1336 __ movptr(c_rarg0, addr);
1337 __ movptr(c_rarg1, count);
1338 }
1339 __ call_VM_leaf(CAST_FROM_FN_PTR(address, BarrierSet::static_write_ref_array_pre), 2);
1340 __ popa();
1341 }
1342 break;
// NOTE(review): lines between the CardTableForRS label and
// ShouldNotReachHere() are elided in this chunk — presumably the
// card-table cases (which need no pre-barrier) break out before the
// catch-all; confirm against the full file.
1343 case BarrierSet::CardTableForRS:
1348 ShouldNotReachHere();
1349
1350 }
1351 }
1352
1353 //
1354 // Generate code for an array write post barrier
1355 //
1356 // Input:
1357 // start - register containing starting address of destination array
1358 // count - elements count
1359 // scratch - scratch register
1360 //
1361 // The input registers are overwritten.
1362 //
1363 void gen_write_ref_array_post_barrier(Register start, Register count, Register scratch) {
1364 assert_different_registers(start, count, scratch);
1365 BarrierSet* bs = Universe::heap()->barrier_set();
1366 switch (bs->kind()) {
1367 case BarrierSet::G1SATBCTLogging:
1368 case BarrierSet::ShenandoahBarrierSet:
1369 {
1370 __ pusha(); // push registers (overkill)
1371 if (c_rarg0 == count) { // On win64 c_rarg0 == rcx
1372 assert_different_registers(c_rarg1, start);
1373 __ mov(c_rarg1, count);
1374 __ mov(c_rarg0, start);
1375 } else {
1376 assert_different_registers(c_rarg0, count);
1377 __ mov(c_rarg0, start);
1378 __ mov(c_rarg1, count);
1379 }
1380 __ call_VM_leaf(CAST_FROM_FN_PTR(address, BarrierSet::static_write_ref_array_post), 2);
1381 __ popa();
1382 }
1383 break;
1384 case BarrierSet::CardTableForRS:
1385 case BarrierSet::CardTableExtension:
1386 {
1387 CardTableModRefBS* ct = barrier_set_cast<CardTableModRefBS>(bs);
1388 assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
4346 // fabricate a RuntimeStub internally.
4347 StubRoutines::_throw_AbstractMethodError_entry =
4348 generate_throw_exception("AbstractMethodError throw_exception",
4349 CAST_FROM_FN_PTR(address,
4350 SharedRuntime::
4351 throw_AbstractMethodError));
4352
4353 StubRoutines::_throw_IncompatibleClassChangeError_entry =
4354 generate_throw_exception("IncompatibleClassChangeError throw_exception",
4355 CAST_FROM_FN_PTR(address,
4356 SharedRuntime::
4357 throw_IncompatibleClassChangeError));
4358
4359 StubRoutines::_throw_NullPointerException_at_call_entry =
4360 generate_throw_exception("NullPointerException at call throw_exception",
4361 CAST_FROM_FN_PTR(address,
4362 SharedRuntime::
4363 throw_NullPointerException_at_call));
4364
4365 // entry points that are platform specific
4366 if (UseShenandoahGC) {
4367 StubRoutines::x86::_shenandoah_wb = generate_shenandoah_wb();
4368 }
4369 StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
4370 StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();
4371 StubRoutines::x86::_d2i_fixup = generate_d2i_fixup();
4372 StubRoutines::x86::_d2l_fixup = generate_d2l_fixup();
4373
4374 StubRoutines::x86::_float_sign_mask = generate_fp_mask("float_sign_mask", 0x7FFFFFFF7FFFFFFF);
4375 StubRoutines::x86::_float_sign_flip = generate_fp_mask("float_sign_flip", 0x8000000080000000);
4376 StubRoutines::x86::_double_sign_mask = generate_fp_mask("double_sign_mask", 0x7FFFFFFFFFFFFFFF);
4377 StubRoutines::x86::_double_sign_flip = generate_fp_mask("double_sign_flip", 0x8000000000000000);
4378
4379 // support for verify_oop (must happen after universe_init)
4380 StubRoutines::_verify_oop_subroutine_entry = generate_verify_oop();
4381
4382 // arraycopy stubs used by compilers
4383 generate_arraycopy_stubs();
4384
4385 generate_math_stubs();
4386
4387 // don't bother generating these AES intrinsic stubs unless global flag is set
4388 if (UseAESIntrinsics) {
|