// AVX-512 path: seed opmask k1 and zmm0/zmm7/zmm8/zmm31 with a test pattern so
// that register state can be checked after the deliberate SEGV generated below.
368 __ kmovwl(k1, rcx);  // test value (rcx) -> opmask register k1
369 __ evpbroadcastd(xmm0, xmm0, Assembler::AVX_512bit);  // replicate low dword across all of zmm0
370 __ evmovdqul(xmm7, xmm0, Assembler::AVX_512bit);  // copy pattern into zmm7
371 #ifdef _LP64
372 __ evmovdqul(xmm8, xmm0, Assembler::AVX_512bit);  // 64-bit only: zmm8 ...
373 __ evmovdqul(xmm31, xmm0, Assembler::AVX_512bit);  // ... and zmm31, the highest EVEX-encoded register
374 #endif
375 VM_Version::clean_cpuFeatures();  // drop the temporarily-enabled feature flags again
376 __ jmp(save_restore_except);  // skip the legacy (AVX-only) setup below
377
378 __ bind(legacy_setup);
379 // AVX setup
380 VM_Version::set_avx_cpuFeatures(); // Enable temporary to pass asserts
381 UseAVX = 1;
382 UseSSE = 2;
383 // load value into all 32 bytes of ymm7 register
384 __ movl(rcx, VM_Version::ymm_test_value());
385
386 __ movdl(xmm0, rcx);  // test value -> low dword of xmm0
387 __ pshufd(xmm0, xmm0, 0x00);  // replicate that dword across all four lanes of xmm0
388 __ vinsertf128h(xmm0, xmm0, xmm0);  // duplicate low 128 bits into the high lane of ymm0
389 __ vmovdqu(xmm7, xmm0);  // fill ymm7 with the pattern
390 #ifdef _LP64
391 __ vmovdqu(xmm8, xmm0);  // 64-bit only: ymm8 ...
392 __ vmovdqu(xmm15, xmm0);  // ... and ymm15 (highest VEX-encoded register)
393 #endif
394 VM_Version::clean_cpuFeatures();  // drop the temporarily-enabled feature flags again
395
396 __ bind(save_restore_except);
397 __ xorl(rsi, rsi);  // rsi = 0, i.e. a null pointer
398 VM_Version::set_cpuinfo_segv_addr(__ pc());  // record the faulting pc so the signal handler can recognize it
399 // Generate SEGV
400 __ movl(rax, Address(rsi, 0));  // load through null -> SIGSEGV; handler saves/restores state and resumes below
401
402 VM_Version::set_cpuinfo_cont_addr(__ pc());  // continuation address the signal handler resumes at
403 // Returns here after signal. Save xmm0 to check it later.
404
405 // check _cpuid_info.sef_cpuid7_ebx.bits.avx512f
406 __ lea(rsi, Address(rbp, in_bytes(VM_Version::sef_cpuid7_offset())));
407 __ movl(rax, 0x10000);  // mask for bit 16 of CPUID leaf 7 EBX (AVX512F)
408 __ andl(rax, Address(rsi, 4));  // offset 4: presumably EBX follows EAX in the sef_cpuid7 record — confirm against struct layout
|
// AVX-512 path: seed opmask k1 and zmm0/zmm7/zmm8/zmm31 with a test pattern so
// that register state can be checked after the deliberate SEGV generated below.
368 __ kmovwl(k1, rcx);  // test value (rcx) -> opmask register k1
369 __ evpbroadcastd(xmm0, xmm0, Assembler::AVX_512bit);  // replicate low dword across all of zmm0
370 __ evmovdqul(xmm7, xmm0, Assembler::AVX_512bit);  // copy pattern into zmm7
371 #ifdef _LP64
372 __ evmovdqul(xmm8, xmm0, Assembler::AVX_512bit);  // 64-bit only: zmm8 ...
373 __ evmovdqul(xmm31, xmm0, Assembler::AVX_512bit);  // ... and zmm31, the highest EVEX-encoded register
374 #endif
375 VM_Version::clean_cpuFeatures();  // drop the temporarily-enabled feature flags again
376 __ jmp(save_restore_except);  // skip the legacy (AVX-only) setup below
377
378 __ bind(legacy_setup);
379 // AVX setup
380 VM_Version::set_avx_cpuFeatures(); // Enable temporary to pass asserts
381 UseAVX = 1;
382 UseSSE = 2;
383 // load value into all 32 bytes of ymm7 register
384 __ movl(rcx, VM_Version::ymm_test_value());
385
386 __ movdl(xmm0, rcx);  // test value -> low dword of xmm0
387 __ pshufd(xmm0, xmm0, 0x00);  // replicate that dword across all four lanes of xmm0
388 __ vinsertf128(xmm0, xmm0, xmm0, 0x01);  // imm8=1: insert xmm0 into the high 128-bit lane of ymm0 (generic form of vinsertf128h)
389 __ vmovdqu(xmm7, xmm0);  // fill ymm7 with the pattern
390 #ifdef _LP64
391 __ vmovdqu(xmm8, xmm0);  // 64-bit only: ymm8 ...
392 __ vmovdqu(xmm15, xmm0);  // ... and ymm15 (highest VEX-encoded register)
393 #endif
394 VM_Version::clean_cpuFeatures();  // drop the temporarily-enabled feature flags again
395
396 __ bind(save_restore_except);
397 __ xorl(rsi, rsi);  // rsi = 0, i.e. a null pointer
398 VM_Version::set_cpuinfo_segv_addr(__ pc());  // record the faulting pc so the signal handler can recognize it
399 // Generate SEGV
400 __ movl(rax, Address(rsi, 0));  // load through null -> SIGSEGV; handler saves/restores state and resumes below
401
402 VM_Version::set_cpuinfo_cont_addr(__ pc());  // continuation address the signal handler resumes at
403 // Returns here after signal. Save xmm0 to check it later.
404
405 // check _cpuid_info.sef_cpuid7_ebx.bits.avx512f
406 __ lea(rsi, Address(rbp, in_bytes(VM_Version::sef_cpuid7_offset())));
407 __ movl(rax, 0x10000);  // mask for bit 16 of CPUID leaf 7 EBX (AVX512F)
408 __ andl(rax, Address(rsi, 4));  // offset 4: presumably EBX follows EAX in the sef_cpuid7 record — confirm against struct layout
|