--- a/src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp Fri Apr 05 10:01:09 2019 +0100
+++ b/src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp Fri Apr 05 09:53:07 2019 -0400
@@ -34,6 +34,7 @@
#include "c1/c1_ValueStack.hpp"
#include "ci/ciArrayKlass.hpp"
#include "ci/ciInstance.hpp"
+#include "code/compiledIC.hpp"
#include "gc/shared/barrierSet.hpp"
#include "gc/shared/cardTableBarrierSet.hpp"
#include "gc/shared/collectedHeap.hpp"
@@ -2063,11 +2064,10 @@
int start = __ offset();

__ relocate(static_stub_Relocation::spec(call_pc));
- __ mov_metadata(rmethod, (Metadata*)NULL);
- __ movptr(rscratch1, 0);
- __ br(rscratch1);
-
- assert(__ offset() - start <= call_stub_size(), "stub too big");
+ __ emit_static_call_stub();
+
+ assert(__ offset() - start + CompiledStaticCall::to_trampoline_stub_size()
+ <= call_stub_size(), "stub too big");
__ end_a_stub();
}
--- a/src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.hpp Fri Apr 05 10:01:09 2019 +0100
+++ b/src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.hpp Fri Apr 05 09:53:07 2019 -0400
@@ -69,7 +69,9 @@
void deoptimize_trap(CodeEmitInfo *info);

enum {
- _call_stub_size = 12 * NativeInstruction::instruction_size,
+ // call stub: CompiledStaticCall::to_interp_stub_size() +
+ // CompiledStaticCall::to_trampoline_stub_size()
+ _call_stub_size = 13 * NativeInstruction::instruction_size,
_call_aot_stub_size = 0,
_exception_handler_size = DEBUG_ONLY(1*K) NOT_DEBUG(175),
_deopt_handler_size = 7 * NativeInstruction::instruction_size
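(Illustrative aside, not part of the changeset: the arithmetic behind the new
13-instruction _call_stub_size, assuming NativeInstruction::instruction_size is
4 bytes and to_trampoline_stub_size() keeps its current value of three
instructions plus a 64-bit target word; the names below are stand-ins, not
HotSpot code.)

    #include <cassert>

    int main() {
      const int instruction_size        = 4;                         // fixed-width AArch64 encoding
      const int to_interp_stub_size     = 8 * instruction_size;      // isb; mov_metadata (3); movptr (3); br
      const int to_trampoline_stub_size = 3 * instruction_size + 8;  // assumed: 3 insns + 64-bit target word
      const int call_stub_size          = 13 * instruction_size;     // the new _call_stub_size above
      // Relation enforced by the new assert in LIR_Assembler::emit_static_call_stub():
      assert(to_interp_stub_size + to_trampoline_stub_size <= call_stub_size);
      return 0;
    }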
--- a/src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp Fri Apr 05 10:01:09 2019 +0100
+++ b/src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp Fri Apr 05 09:53:07 2019 -0400
@@ -61,14 +61,14 @@
// Don't create a Metadata reloc if we're generating immutable PIC.
if (cbuf.immutable_PIC()) {
__ movptr(rmethod, 0);
- } else {
- __ mov_metadata(rmethod, (Metadata*)NULL);
+ __ movptr(rscratch1, 0);
+ __ br(rscratch1);
+
+ } else
+#endif
+ {
+ __ emit_static_call_stub();
}
-#else
- __ mov_metadata(rmethod, (Metadata*)NULL);
-#endif
- __ movptr(rscratch1, 0);
- __ br(rscratch1);

assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
__ end_a_stub();
@@ -77,7 +77,8 @@
#undef __

int CompiledStaticCall::to_interp_stub_size() {
- return 7 * NativeInstruction::instruction_size;
+ // isb; movk; movz; movz; movk; movz; movz; br
+ return 8 * NativeInstruction::instruction_size;
}

int CompiledStaticCall::to_trampoline_stub_size() {
@@ -159,7 +160,8 @@
}

// Creation also verifies the object.
- NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
+ NativeMovConstReg* method_holder
+ = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
#ifndef PRODUCT
NativeGeneralJump* jump = nativeGeneralJump_at(method_holder->next_instruction_address());
@@ -184,7 +186,8 @@
assert(stub != NULL, "stub not found");
assert(CompiledICLocker::is_safe(stub), "mt unsafe call");
// Creation also verifies the object.
- NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
+ NativeMovConstReg* method_holder
+ = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
method_holder->set_data(0);
}
@@ -201,8 +204,9 @@
address stub = find_stub(false /* is_aot */);
assert(stub != NULL, "no stub found for static call");
// Creation also verifies the object.
- NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
- NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
+ NativeMovConstReg* method_holder
+ = nativeMovConstReg_at(stub + NativeInstruction::instruction_size);
+ NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

// Verify state.
assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
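(Illustrative aside, not part of the changeset: the lookups above now start at
stub + NativeInstruction::instruction_size because the to-interp stub opens
with an isb, so the NativeMovConstReg holding the Metadata* begins one
instruction in. A sketch of the layout, assuming the movz/movk/movk expansion
of mov_metadata and movptr; offsets are illustrative only.)

    // Offsets into the stub emitted by MacroAssembler::emit_static_call_stub():
    constexpr int insn              = 4;          // NativeInstruction::instruction_size
    constexpr int isb_off           = 0 * insn;   // leading isb, skipped by the accessors above
    constexpr int method_holder_off = 1 * insn;   // mov_metadata(rmethod, ...) -> nativeMovConstReg_at(stub + insn)
    constexpr int jump_off          = 4 * insn;   // movptr(rscratch1, ...); br -> method_holder->next_instruction_address()
    constexpr int stub_size         = 8 * insn;   // matches to_interp_stub_size()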
--- a/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp Fri Apr 05 10:01:09 2019 +0100
+++ b/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp Fri Apr 05 09:53:07 2019 -0400
@@ -812,6 +812,18 @@
return stub_start_addr;
}

+void MacroAssembler::emit_static_call_stub() {
+ // CompiledDirectStaticCall::set_to_interpreted knows the
+ // exact layout of this stub.
+
+ isb();
+ mov_metadata(rmethod, (Metadata*)NULL);
+
+ // Jump to the entry point of the i2c stub.
+ movptr(rscratch1, 0);
+ br(rscratch1);
+}
+
void MacroAssembler::c2bool(Register x) {
// implements x == 0 ? 0 : 1
// note: must only look at least-significant byte of x
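(Illustrative aside, not part of the changeset: the shape of a call site for
the new helper, mirroring the C1 hunk above; the function name here is
hypothetical and the HotSpot types are used as in this patch, so treat this as
a sketch rather than a standalone program.)

    void emit_static_call_stub_at(MacroAssembler* masm, address call_pc, int call_stub_size) {
      address stub = masm->start_a_stub(call_stub_size);
      if (stub == NULL) return;                       // real callers bail out: "static call stub overflow"
      int start = masm->offset();
      masm->relocate(static_stub_Relocation::spec(call_pc));
      masm->emit_static_call_stub();                  // isb; mov_metadata(rmethod, NULL); movptr; br
      assert(masm->offset() - start + CompiledStaticCall::to_trampoline_stub_size()
             <= call_stub_size, "stub too big");
      masm->end_a_stub();
    }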
--- a/src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp Fri Apr 05 10:01:09 2019 +0100
+++ b/src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp Fri Apr 05 09:53:07 2019 -0400
@@ -607,6 +607,7 @@
static int patch_narrow_klass(address insn_addr, narrowKlass n);
address emit_trampoline_stub(int insts_call_instruction_offset, address target);
+ void emit_static_call_stub();

// The following 4 methods return the offset of the appropriate move instruction
--- a/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp Fri Apr 05 10:01:09 2019 +0100
+++ b/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp Fri Apr 05 09:53:07 2019 -0400
@@ -232,7 +232,11 @@
//-------------------------------------------------------------------

void NativeMovConstReg::verify() {
- // make sure code pattern is actually mov reg64, imm64 instructions
+ if (! (nativeInstruction_at(instruction_address())->is_movz() ||
+ is_adrp_at(instruction_address()) ||
+ is_ldr_literal_at(instruction_address())) ) {
+ fatal("should be MOVZ or ADRP or LDR (literal)");
+ }
}