src/cpu/x86/vm/macroAssembler_x86.cpp
*** old/src/cpu/x86/vm/macroAssembler_x86.cpp	Fri Jul 25 15:31:06 2014
--- new/src/cpu/x86/vm/macroAssembler_x86.cpp	Fri Jul 25 15:31:06 2014

*** 7314,7334 ****
--- 7314,7351 ----
  /**
   * Fold 128-bit data chunk
   */
  void MacroAssembler::fold_128bit_crc32(XMMRegister xcrc, XMMRegister xK, XMMRegister xtmp, Register buf, int offset) {
+   if (UseAVX > 0) {
    vpclmulhdq(xtmp, xK, xcrc); // [123:64]
    vpclmulldq(xcrc, xK, xcrc); // [63:0]
    vpxor(xcrc, xcrc, Address(buf, offset), false /* vector256 */);
    pxor(xcrc, xtmp);
+   } else {
+     movdqa(xtmp, xcrc);
+     pclmulhdq(xtmp, xK); // [123:64]
+     pclmulldq(xcrc, xK); // [63:0]
+     pxor(xcrc, xtmp);
+     movdqu(xtmp, Address(buf, offset));
+     pxor(xcrc, xtmp);
+   }
  }

  void MacroAssembler::fold_128bit_crc32(XMMRegister xcrc, XMMRegister xK, XMMRegister xtmp, XMMRegister xbuf) {
+   if (UseAVX > 0) {
    vpclmulhdq(xtmp, xK, xcrc);
    vpclmulldq(xcrc, xK, xcrc);
    pxor(xcrc, xbuf);
    pxor(xcrc, xtmp);
+   } else {
+     movdqa(xtmp, xcrc);
+     pclmulhdq(xtmp, xK);
+     pclmulldq(xcrc, xK);
+     pxor(xcrc, xbuf);
+     pxor(xcrc, xtmp);
+   }
  }

  /**
   * 8-bit folds to compute 32-bit CRC
   *
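For reference, the non-AVX branch added above maps directly onto SSE/CLMUL compiler intrinsics. The sketch below is illustrative host code, not HotSpot code: fold_128bit_crc32_sketch and its parameter names are invented for this note, and HotSpot's pclmulhdq/pclmulldq helpers are Assembler::pclmulqdq with immediates 0x11 (multiply the high 64-bit halves) and 0x00 (multiply the low halves).

    // Minimal sketch of one 128-bit fold step, assuming SSE2 + CLMUL
    // (compile with -mpclmul).  Names are illustrative only.
    #include <emmintrin.h>   // _mm_xor_si128, _mm_loadu_si128
    #include <wmmintrin.h>   // _mm_clmulepi64_si128

    static inline __m128i fold_128bit_crc32_sketch(__m128i xcrc, __m128i xK,
                                                   const void* buf) {
      __m128i hi  = _mm_clmulepi64_si128(xcrc, xK, 0x11); // [123:64] product
      __m128i lo  = _mm_clmulepi64_si128(xcrc, xK, 0x00); // [63:0] product
      __m128i acc = _mm_xor_si128(lo, hi);
      // Fold in the next 16 data bytes, as vpxor / movdqu+pxor do above.
      return _mm_xor_si128(acc, _mm_loadu_si128((const __m128i*)buf));
    }

Because intrinsics do not clobber their inputs, the movdqa scratch copy in the emitted SSE sequence has no counterpart here; the order of the final XORs is likewise immaterial, since pxor is associative and commutative.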
*** 7442,7454 ****
--- 7459,7479 ----
    jccb(Assembler::greater, L_fold_tail_loop);

    // Fold 128 bits in xmm1 down into 32 bits in crc register.
    BIND(L_fold_128b);
    movdqu(xmm0, ExternalAddress(StubRoutines::x86::crc_by128_masks_addr()));
+   if (UseAVX > 0) {
    vpclmulqdq(xmm2, xmm0, xmm1, 0x1);
    vpand(xmm3, xmm0, xmm2, false /* vector256 */);
    vpclmulqdq(xmm0, xmm0, xmm3, 0x1);
+   } else {
+     movdqa(xmm2, xmm0);
+     pclmulqdq(xmm2, xmm1, 0x1);
+     movdqa(xmm3, xmm0);
+     pand(xmm3, xmm2);
+     pclmulqdq(xmm0, xmm3, 0x1);
+   }
    psrldq(xmm1, 8);
    psrldq(xmm2, 4);
    pxor(xmm0, xmm1);
    pxor(xmm0, xmm2);
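The same AVX/SSE split covers the final reduction of the 128-bit accumulator. Again as an illustrative sketch (masks stands in for the constant block loaded from StubRoutines::x86::crc_by128_masks_addr(); the helper name is invented):

    // Sketch of the SSE fallback for L_fold_128b, assuming SSE2 + CLMUL.
    #include <emmintrin.h>   // _mm_and_si128, _mm_srli_si128, _mm_xor_si128
    #include <wmmintrin.h>   // _mm_clmulepi64_si128

    static inline __m128i fold_128b_down_sketch(__m128i xmm1, __m128i masks) {
      // imm 0x01 multiplies masks[127:64] by the low qword of the second operand;
      // the movdqa copies in the emitted code are implicit with intrinsics.
      __m128i xmm2 = _mm_clmulepi64_si128(masks, xmm1, 0x01);
      __m128i xmm3 = _mm_and_si128(masks, xmm2);
      __m128i xmm0 = _mm_clmulepi64_si128(masks, xmm3, 0x01);
      xmm1 = _mm_srli_si128(xmm1, 8);    // psrldq shifts by bytes
      xmm2 = _mm_srli_si128(xmm2, 4);
      xmm0 = _mm_xor_si128(xmm0, xmm1);
      return _mm_xor_si128(xmm0, xmm2);  // stub continues with 8-bit table folds
    }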
