< prev index next >
src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp
Print this page
rev 60622 : 8248672: utilities: Disallow cmp method using C++ feature
Reviewed-by:
Contributed-by: mbeckwit, luhenry, burban
rev 60623 : 8248500: AArch64: Remove the r18 dependency on Windows AArch64
Reviewed-by:
Contributed-by: mbeckwit, luhenry, burban
rev 60626 : 8248663: AArch64: Avoid existing macros/keywords of MSVC
Reviewed-by:
Contributed-by: mbeckwit, luhenry, burban
rev 60627 : 8248681: AArch64: MSVC doesn't support __PRETTY_FUNCTION__
Reviewed-by:
Contributed-by: mbeckwit, luhenry, burban
@@ -190,19 +190,27 @@
// Stub for unimplemented code paths: stashes the call-site string address
// in rscratch2 so it is visible in a debugger. Invoked through the
// call_Unimplemented() macro, which supplies the enclosing function name.
virtual void _call_Unimplemented(address call_site) {
mov(rscratch2, call_site);
}
+// Microsoft's MSVC team considers __FUNCSIG__ approximately equivalent to GCC's __PRETTY_FUNCTION__; the difference is that __FUNCSIG__ also prints the calling convention.
+// Also, from Clang patch: "It is very similar to GCC's PRETTY_FUNCTION, except it prints the calling convention."
+// https://reviews.llvm.org/D3311
+
+#ifdef _WIN64
+#define call_Unimplemented() _call_Unimplemented((address)__FUNCSIG__)
+#else
#define call_Unimplemented() _call_Unimplemented((address)__PRETTY_FUNCTION__)
+#endif
// aliases defined in AARCH64 spec
// 32-bit compare alias (AArch64 spec): cmpw Rd, imm == subsw zr, Rd, imm —
// sets flags, discards the result.
template<class T>
inline void cmpw(Register Rd, T imm) { subsw(zr, Rd, imm); }
inline void cmp(Register Rd, unsigned char imm8) { subs(zr, Rd, imm8); }
- inline void cmp(Register Rd, unsigned imm) __attribute__ ((deprecated));
+ inline void cmp(Register Rd, unsigned imm) = delete;
inline void cmnw(Register Rd, unsigned imm) { addsw(zr, Rd, imm); }
inline void cmn(Register Rd, unsigned imm) { adds(zr, Rd, imm); }
void cset(Register Rd, Assembler::Condition cond) {
@@ -472,10 +480,12 @@
void pop(RegSet regs, Register stack) { if (regs.bits()) pop(regs.bits(), stack); }
void push_fp(RegSet regs, Register stack) { if (regs.bits()) push_fp(regs.bits(), stack); }
void pop_fp(RegSet regs, Register stack) { if (regs.bits()) pop_fp(regs.bits(), stack); }
+ static RegSet call_clobbered_registers();
+
// Push and pop everything that might be clobbered by a native
// runtime call except rscratch1 and rscratch2. (They are always
// scratch, so we don't have to protect them.) Only save the lower
// 64 bits of each vector register. Additional registers can be excluded
// in a passed RegSet.
@@ -535,23 +545,23 @@
public:
// Generalized Test Bit And Branch, including a "far" variety which
// spans more than 32KiB.
- void tbr(Condition cond, Register Rt, int bitpos, Label &dest, bool far = false) {
+ void tbr(Condition cond, Register Rt, int bitpos, Label &dest, bool isfar = false) {
assert(cond == EQ || cond == NE, "must be");
- if (far)
+ if (isfar)
cond = ~cond;
void (Assembler::* branch)(Register Rt, int bitpos, Label &L);
if (cond == Assembler::EQ)
branch = &Assembler::tbz;
else
branch = &Assembler::tbnz;
- if (far) {
+ if (isfar) {
Label L;
(this->*branch)(Rt, bitpos, L);
b(dest);
bind(L);
} else {
< prev index next >