14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 */
23 package org.graalvm.compiler.asm.amd64;
24
25 import static jdk.vm.ci.amd64.AMD64.rax;
26 import static jdk.vm.ci.amd64.AMD64.rcx;
27 import static jdk.vm.ci.amd64.AMD64.rdx;
28 import static jdk.vm.ci.amd64.AMD64.rsp;
29 import static org.graalvm.compiler.asm.amd64.AMD64AsmOptions.UseIncDec;
30 import static org.graalvm.compiler.asm.amd64.AMD64AsmOptions.UseXmmLoadAndClearUpper;
31 import static org.graalvm.compiler.asm.amd64.AMD64AsmOptions.UseXmmRegToRegMoveAll;
32
33 import org.graalvm.compiler.asm.Label;
34 import org.graalvm.compiler.core.common.NumUtil;
35 import org.graalvm.compiler.asm.amd64.AMD64Address.Scale;
36
37 import jdk.vm.ci.amd64.AMD64;
38 import jdk.vm.ci.amd64.AMD64Kind;
39 import jdk.vm.ci.code.Register;
40 import jdk.vm.ci.code.TargetDescription;
41
42 /**
43 * This class implements commonly used X86 code patterns.
44 */
45 public class AMD64MacroAssembler extends AMD64Assembler {
46
    /**
     * Creates a macro assembler emitting into the given target's code buffer.
     *
     * @param target the target description forwarded to the underlying {@link AMD64Assembler}
     */
    public AMD64MacroAssembler(TargetDescription target) {
        super(target);
    }
50
51 public final void decrementq(Register reg, int value) {
52 if (value == Integer.MIN_VALUE) {
53 subq(reg, value);
54 return;
55 }
262 }
263
    /**
     * Stores the double held in XMM register {@code src} to memory at {@code dst} via
     * {@code movsd}.
     *
     * @param dst destination memory address
     * @param src source register; must be an XMM register (checked by assertion)
     */
    public void movdbl(AMD64Address dst, Register src) {
        assert src.getRegisterCategory().equals(AMD64.XMM);
        movsd(dst, src);
    }
268
269 /**
270 * Non-atomic write of a 64-bit constant to memory. Do not use if the address might be a
271 * volatile field!
272 */
273 public final void movlong(AMD64Address dst, long src) {
274 if (NumUtil.isInt(src)) {
275 AMD64MIOp.MOV.emit(this, OperandSize.QWORD, dst, (int) src);
276 } else {
277 AMD64Address high = new AMD64Address(dst.getBase(), dst.getIndex(), dst.getScale(), dst.getDisplacement() + 4);
278 movl(dst, (int) (src & 0xFFFFFFFF));
279 movl(high, (int) (src >> 32));
280 }
281
282 }
283
    /**
     * Computes the logarithm of {@code value} into {@code dest} using the x87 FPU.
     *
     * The order of emission matters: {@code fldlg2}/{@code fldln2} push the constant
     * log10(2) or ln(2) onto the x87 stack first, then {@code trigPrologue} (defined elsewhere
     * in this class — presumably it transfers {@code value} onto the x87 stack via a memory
     * temporary) loads the argument, and {@code fyl2x} computes ST1 * log2(ST0), yielding
     * log10(value) or ln(value).
     *
     * @param dest   register receiving the result (written back by {@code trigEpilogue})
     * @param value  register holding the input value
     * @param base10 if {@code true} compute log base 10, otherwise the natural logarithm
     */
    public final void flog(Register dest, Register value, boolean base10) {
        if (base10) {
            fldlg2();
        } else {
            fldln2();
        }
        AMD64Address tmp = trigPrologue(value);
        fyl2x();
        trigEpilogue(dest, tmp);
    }
294
    /**
     * Computes sin({@code value}) into {@code dest} using the x87 {@code fsin} instruction.
     *
     * {@code trigPrologue}/{@code trigEpilogue} (defined elsewhere in this class) presumably
     * shuttle the operand between {@code value}/{@code dest} and the x87 stack through the
     * memory temporary {@code tmp} — confirm against their definitions.
     *
     * @param dest  register receiving the result
     * @param value register holding the input value
     */
    public final void fsin(Register dest, Register value) {
        AMD64Address tmp = trigPrologue(value);
        fsin();
        trigEpilogue(dest, tmp);
    }
300
301 public final void fcos(Register dest, Register value) {
|
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 */
23 package org.graalvm.compiler.asm.amd64;
24
25 import static jdk.vm.ci.amd64.AMD64.rax;
26 import static jdk.vm.ci.amd64.AMD64.rcx;
27 import static jdk.vm.ci.amd64.AMD64.rdx;
28 import static jdk.vm.ci.amd64.AMD64.rsp;
29 import static org.graalvm.compiler.asm.amd64.AMD64AsmOptions.UseIncDec;
30 import static org.graalvm.compiler.asm.amd64.AMD64AsmOptions.UseXmmLoadAndClearUpper;
31 import static org.graalvm.compiler.asm.amd64.AMD64AsmOptions.UseXmmRegToRegMoveAll;
32
33 import org.graalvm.compiler.asm.Label;
34 import org.graalvm.compiler.asm.amd64.AMD64Address.Scale;
35 import org.graalvm.compiler.core.common.NumUtil;
36
37 import jdk.vm.ci.amd64.AMD64;
38 import jdk.vm.ci.amd64.AMD64Kind;
39 import jdk.vm.ci.code.Register;
40 import jdk.vm.ci.code.TargetDescription;
41
42 /**
43 * This class implements commonly used X86 code patterns.
44 */
45 public class AMD64MacroAssembler extends AMD64Assembler {
46
    /**
     * Creates a macro assembler emitting into the given target's code buffer.
     *
     * @param target the target description forwarded to the underlying {@link AMD64Assembler}
     */
    public AMD64MacroAssembler(TargetDescription target) {
        super(target);
    }
50
51 public final void decrementq(Register reg, int value) {
52 if (value == Integer.MIN_VALUE) {
53 subq(reg, value);
54 return;
55 }
262 }
263
    /**
     * Stores the double held in XMM register {@code src} to memory at {@code dst} via
     * {@code movsd}.
     *
     * @param dst destination memory address
     * @param src source register; must be an XMM register (checked by assertion)
     */
    public void movdbl(AMD64Address dst, Register src) {
        assert src.getRegisterCategory().equals(AMD64.XMM);
        movsd(dst, src);
    }
268
269 /**
270 * Non-atomic write of a 64-bit constant to memory. Do not use if the address might be a
271 * volatile field!
272 */
273 public final void movlong(AMD64Address dst, long src) {
274 if (NumUtil.isInt(src)) {
275 AMD64MIOp.MOV.emit(this, OperandSize.QWORD, dst, (int) src);
276 } else {
277 AMD64Address high = new AMD64Address(dst.getBase(), dst.getIndex(), dst.getScale(), dst.getDisplacement() + 4);
278 movl(dst, (int) (src & 0xFFFFFFFF));
279 movl(high, (int) (src >> 32));
280 }
281
282 }
283
    /**
     * Materializes condition flag {@code cc} as a 0/1 value in 32-bit register {@code dst}.
     *
     * {@code setb} writes only the low byte of {@code dst}, so the subsequent
     * {@code movzbl} zero-extends that byte to the full 32-bit register.
     *
     * @param cc  condition to test
     * @param dst register receiving 1 if the condition holds, else 0
     */
    public final void setl(ConditionFlag cc, Register dst) {
        setb(cc, dst);
        movzbl(dst, dst);
    }
288
    /**
     * Materializes condition flag {@code cc} as a 0/1 value in 64-bit register {@code dst}.
     *
     * {@code setb} writes only the low byte of {@code dst}, so the subsequent
     * {@code movzbq} zero-extends that byte to the full 64-bit register.
     *
     * @param cc  condition to test
     * @param dst register receiving 1 if the condition holds, else 0
     */
    public final void setq(ConditionFlag cc, Register dst) {
        setb(cc, dst);
        movzbq(dst, dst);
    }
293
    /**
     * Computes the logarithm of {@code value} into {@code dest} using the x87 FPU.
     *
     * The order of emission matters: {@code fldlg2}/{@code fldln2} push the constant
     * log10(2) or ln(2) onto the x87 stack first, then {@code trigPrologue} (defined elsewhere
     * in this class — presumably it transfers {@code value} onto the x87 stack via a memory
     * temporary) loads the argument, and {@code fyl2x} computes ST1 * log2(ST0), yielding
     * log10(value) or ln(value).
     *
     * @param dest   register receiving the result (written back by {@code trigEpilogue})
     * @param value  register holding the input value
     * @param base10 if {@code true} compute log base 10, otherwise the natural logarithm
     */
    public final void flog(Register dest, Register value, boolean base10) {
        if (base10) {
            fldlg2();
        } else {
            fldln2();
        }
        AMD64Address tmp = trigPrologue(value);
        fyl2x();
        trigEpilogue(dest, tmp);
    }
304
    /**
     * Computes sin({@code value}) into {@code dest} using the x87 {@code fsin} instruction.
     *
     * {@code trigPrologue}/{@code trigEpilogue} (defined elsewhere in this class) presumably
     * shuttle the operand between {@code value}/{@code dest} and the x87 stack through the
     * memory temporary {@code tmp} — confirm against their definitions.
     *
     * @param dest  register receiving the result
     * @param value register holding the input value
     */
    public final void fsin(Register dest, Register value) {
        AMD64Address tmp = trigPrologue(value);
        fsin();
        trigEpilogue(dest, tmp);
    }
310
311 public final void fcos(Register dest, Register value) {
|