--- old/src/cpu/x86/vm/stubGenerator_x86_32.cpp	2015-04-06 14:41:03.990099200 -0700
+++ new/src/cpu/x86/vm/stubGenerator_x86_32.cpp	2015-04-06 14:41:03.794079600 -0700
@@ -794,7 +794,10 @@
   __ BIND(L_copy_64_bytes_loop);
 
     if (UseUnalignedLoadStores) {
-      if (UseAVX >= 2) {
+      if (UseAVX > 2) {
+        __ evmovdqu(xmm0, Address(from, 0), Assembler::AVX_512bit);
+        __ evmovdqu(Address(from, to_from, Address::times_1, 0), xmm0, Assembler::AVX_512bit);
+      } else if (UseAVX == 2) {
         __ vmovdqu(xmm0, Address(from, 0));
         __ vmovdqu(Address(from, to_from, Address::times_1, 0), xmm0);
         __ vmovdqu(xmm1, Address(from, 32));
@@ -833,7 +836,7 @@
     __ subl(qword_count, 8);
     __ jcc(Assembler::greaterEqual, L_copy_64_bytes_loop);
 
-    if (UseUnalignedLoadStores && (UseAVX >= 2)) {
+    if (UseUnalignedLoadStores && (UseAVX == 2)) {
       // clean upper bits of YMM registers
       __ vzeroupper();
     }