--- a/src/hotspot/cpu/aarch64/assembler_aarch64.hpp Wed May 22 06:29:08 2019 -0400
+++ b/src/hotspot/cpu/aarch64/assembler_aarch64.hpp Thu May 23 07:57:50 2019 -0400
@@ -1491,6 +1491,25 @@
INSN(eorw, 0, 0b10, 0);
INSN(andsw, 0, 0b11, 0);
+#undef INSN
+
+#define INSN(NAME, size, op, N) \
+ void NAME(Register Rd, Register Rn, Register Rm, \
+ enum shift_kind kind = LSL, unsigned shift = 0) { \
+ starti; \
+ f(N, 21); \
+ zrf(Rm, 16), zrf(Rn, 5), zrf(Rd, 0); \
+ op_shifted_reg(0b01010, kind, shift, size, op); \
+ } \
+ \
+ /* These instructions have no immediate form. Provide an overload so \
+ that if anyone does try to use an immediate operand -- this has \
+ happened! -- overload resolution lands here and the debug-build \
+ assert below fires instead of silently emitting a bogus encoding. */ \
+ void NAME(Register Rd, Register Rn, unsigned imm, \
+ enum shift_kind kind = LSL, unsigned shift = 0) { \
+ assert(false, " can't be used with immediate operand"); \
+ }
+
INSN(bic, 1, 0b00, 1);
INSN(orn, 1, 0b01, 1);
INSN(eon, 1, 0b10, 1);
--- a/src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp Wed May 22 06:29:08 2019 -0400
+++ b/src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp Thu May 23 07:57:50 2019 -0400
@@ -2268,7 +2268,7 @@
__ ldr(src, Address(sp, 4*BytesPerWord));
// r0 is -1^K where K == partial copied count
- __ eonw(rscratch1, r0, 0);
+ __ eonw(rscratch1, r0, zr);
// adjust length down and src/end pos up by partial copied count
__ subw(length, length, rscratch1);
__ addw(src_pos, src_pos, rscratch1);