--- a/src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp Fri Oct 11 08:49:42 2019 -0400
+++ b/src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp Thu Oct 10 10:59:13 2019 +0100
@@ -185,6 +185,10 @@
NativeMovConstReg* method_holder
= nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
method_holder->set_data(0);
+ if (!static_stub->is_aot()) {
+ NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
+ jump->set_jump_destination((address)-1);
+ }
}
//-----------------------------------------------------------------------------
--- a/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp Fri Oct 11 08:49:42 2019 -0400
+++ b/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp Thu Oct 10 10:59:13 2019 +0100
@@ -332,9 +332,14 @@
// We use jump to self as the unresolved address which the inline
// cache code (and relocs) know about
+ // As a special case we also use sequence movptr(r,0); br(r);
+ // i.e. jump to 0 when we need to leave space for a wide immediate
+ // load
- // return -1 if jump to self
- dest = (dest == (address) this) ? (address) -1 : dest;
+ // return -1 if jump to self or to 0
+ if ((dest == (address)this) || dest == 0) {
+ dest = (address) -1;
+ }
return dest;
}
@@ -356,9 +361,13 @@
// We use jump to self as the unresolved address which the inline
// cache code (and relocs) know about
+ // As a special case we also use jump to 0 when first generating
+ // a general jump
- // return -1 if jump to self
- dest = (dest == (address) this) ? (address) -1 : dest;
+ // return -1 if jump to self or to 0
+ if ((dest == (address)this) || dest == 0) {
+ dest = (address) -1;
+ }
return dest;
}