--- a/make/autoconf/hotspot.m4 Mon May 14 15:43:00 2018 +0200
+++ b/make/autoconf/hotspot.m4 Mon May 14 12:03:59 2018 +0100
@@ -206,7 +206,7 @@
if test "x$ENABLE_AOT" = "xtrue"; then
# Only enable AOT on X64 platforms.
- if test "x$OPENJDK_TARGET_CPU" = "xx86_64"; then
+ if test "x$OPENJDK_TARGET_CPU" = "xx86_64" || test "x$OPENJDK_TARGET_CPU" = "xaarch64" ; then
if test -e "${TOPDIR}/src/jdk.aot"; then
if test -e "${TOPDIR}/src/jdk.internal.vm.compiler"; then
ENABLE_AOT="true"
--- a/make/hotspot/lib/JvmFeatures.gmk Mon May 14 15:43:00 2018 +0200
+++ b/make/hotspot/lib/JvmFeatures.gmk Mon May 14 12:03:59 2018 +0100
@@ -128,8 +128,9 @@
ifneq ($(call check-jvm-feature, aot), true)
JVM_CFLAGS_FEATURES += -DINCLUDE_AOT=0
JVM_EXCLUDE_FILES += \
- compiledIC_aot_x86_64.cpp compilerRuntime.cpp \
- aotCodeHeap.cpp aotCompiledMethod.cpp aotLoader.cpp compiledIC_aot.cpp
+ compiledIC_aot_x86_64.cpp compiledIC_aot_aarch64.cpp \
+ compilerRuntime.cpp aotCodeHeap.cpp aotCompiledMethod.cpp \
+ aotLoader.cpp compiledIC_aot.cpp
endif
ifneq ($(call check-jvm-feature, cmsgc), true)
--- a/make/launcher/Launcher-jdk.aot.gmk Mon May 14 15:43:00 2018 +0200
+++ b/make/launcher/Launcher-jdk.aot.gmk Mon May 14 12:03:59 2018 +0100
@@ -41,6 +41,7 @@
, \
JAVA_ARGS := --add-exports=jdk.internal.vm.ci/jdk.vm.ci.hotspot.aarch64=$(call CommaList, jdk.internal.vm.compiler jdk.aot) \
--add-exports=jdk.internal.vm.ci/jdk.vm.ci.hotspot.amd64=$(call CommaList, jdk.internal.vm.compiler jdk.aot) \
+ --add-exports=jdk.internal.vm.ci/jdk.vm.ci.hotspot.aarch64=$(call CommaList, jdk.internal.vm.compiler jdk.aot) \
--add-exports=jdk.internal.vm.ci/jdk.vm.ci.hotspot.sparc=$(call CommaList, jdk.internal.vm.compiler jdk.aot) \
--add-exports=jdk.internal.vm.ci/jdk.vm.ci.meta=$(call CommaList, jdk.internal.vm.compiler jdk.aot) \
--add-exports=jdk.internal.vm.ci/jdk.vm.ci.runtime=$(call CommaList, jdk.internal.vm.compiler jdk.aot) \
--- a/src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp Mon May 14 15:43:00 2018 +0200
+++ b/src/hotspot/cpu/aarch64/compiledIC_aarch64.cpp Mon May 14 12:03:59 2018 +0100
@@ -1,6 +1,6 @@
/*
* Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
- * Copyright (c) 2014, Red Hat Inc. All rights reserved.
+ * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -56,8 +56,17 @@
}
// static stub relocation stores the instruction address of the call
__ relocate(static_stub_Relocation::spec(mark));
- // static stub relocation also tags the Method* in the code-stream.
+
+#if INCLUDE_AOT
+ // Don't create a Metadata reloc if we're generating immutable PIC.
+ if (cbuf.immutable_PIC()) {
+ __ movptr(rmethod, 0);
+ } else {
+ __ mov_metadata(rmethod, (Metadata*)NULL);
+ }
+#else
__ mov_metadata(rmethod, (Metadata*)NULL);
+#endif
__ movptr(rscratch1, 0);
__ br(rscratch1);
@@ -83,6 +92,61 @@
return 4; // 3 in emit_to_interp_stub + 1 in emit_call
}
+#if INCLUDE_AOT
+#define __ _masm.
+void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
+ if (!UseAOT) {
+ return;
+ }
+ // Stub is fixed up when the corresponding call is converted from
+ // calling compiled code to calling aot code.
+ // mov r, imm64_aot_code_address
+ // jmp r
+
+ if (mark == NULL) {
+ mark = cbuf.insts_mark(); // Get mark within main instrs section.
+ }
+
+ // Note that the code buffer's insts_mark is always relative to insts.
+ // That's why we must use the macroassembler to generate a stub.
+ MacroAssembler _masm(&cbuf);
+
+ address base =
+ __ start_a_stub(to_aot_stub_size());
+ guarantee(base != NULL, "out of space");
+
+ // Static stub relocation stores the instruction address of the call.
+ __ relocate(static_stub_Relocation::spec(mark, true /* is_aot */));
+ // Load destination AOT code address.
+ __ movptr(rscratch1, 0); // address is zapped till fixup time.
+ // This is recognized as unresolved by relocs/nativeinst/ic code.
+ __ br(rscratch1);
+
+ assert(__ pc() - base <= to_aot_stub_size(), "wrong stub size");
+
+ // Update current stubs pointer and restore insts_end.
+ __ end_a_stub();
+}
+#undef __
+
+int CompiledStaticCall::to_aot_stub_size() {
+ if (UseAOT) {
+ return 5 * 4; // movz; movk; movk; movk; br
+ } else {
+ return 0;
+ }
+}
+
+// Relocation entries for call stub, compiled java to aot.
+int CompiledStaticCall::reloc_to_aot_stub() {
+ if (UseAOT) {
+ return 5 * 4; // movz; movk; movk; movk; br
+ } else {
+ return 0;
+ }
+}
+#endif // INCLUDE_AOT
+
void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
address stub = find_stub(false /* is_aot */);
guarantee(stub != NULL, "stub not found");
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/hotspot/cpu/aarch64/compiledIC_aot_aarch64.cpp Mon May 14 12:03:59 2018 +0100
@@ -0,0 +1,103 @@
+/*
+ * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2018, Red Hat Inc. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+#include "aot/compiledIC_aot.hpp"
+#include "code/codeCache.hpp"
+#include "memory/resourceArea.hpp"
+
+void CompiledDirectStaticCall::set_to_far(const methodHandle& callee, address entry) {
+ if (TraceICs) {
+ ResourceMark rm;
+ tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_far %s",
+ p2i(instruction_address()),
+ callee->name_and_sig_as_C_string());
+ }
+
+ set_destination_mt_safe(entry);
+}
+
+void CompiledPltStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
+ address stub = find_stub();
+ guarantee(stub != NULL, "stub not found");
+ if (TraceICs) {
+ ResourceMark rm;
+ tty->print_cr("CompiledPltStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
+ p2i(instruction_address()),
+ callee->name_and_sig_as_C_string());
+ }
+
+ // Creation also verifies the object.
+ NativeLoadGot* method_loader = nativeLoadGot_at(stub);
+ NativeGotJump* jump = nativeGotJump_at(method_loader->next_instruction_address());
+
+ intptr_t data = method_loader->data();
+ address destination = jump->destination();
+ assert(data == 0 || data == (intptr_t)callee(),
+ "a) MT-unsafe modification of inline cache");
+ assert(destination == (address)Universe::non_oop_word()
+ || destination == entry,
+ "b) MT-unsafe modification of inline cache");
+
+ // Update stub.
+ method_loader->set_data((intptr_t)callee());
+ jump->set_jump_destination(entry);
+
+ // Update jump to call.
+ set_destination_mt_safe(stub);
+}
+
+#ifdef NEVER_CALLED
+void CompiledPltStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
+ assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
+ // Reset stub.
+ address stub = static_stub->addr();
+ assert(stub != NULL, "stub not found");
+ // Creation also verifies the object.
+ NativeLoadGot* method_loader = nativeLoadGot_at(stub);
+ NativeGotJump* jump = nativeGotJump_at(method_loader->next_instruction_address());
+ method_loader->set_data(0);
+ jump->set_jump_destination((address)-1);
+}
+#endif
+
+#ifndef PRODUCT
+void CompiledPltStaticCall::verify() {
+ // Verify call.
+ _call->verify();
+
+#ifdef ASSERT
+ CodeBlob *cb = CodeCache::find_blob_unsafe((address) _call);
+ assert(cb && cb->is_aot(), "CompiledPltStaticCall can only be used on AOTCompiledMethod");
+#endif
+
+ // Verify stub.
+ address stub = find_stub();
+ assert(stub != NULL, "no stub found for static call");
+ // Creation also verifies the object.
+ NativeLoadGot* method_loader = nativeLoadGot_at(stub);
+ NativeGotJump* jump = nativeGotJump_at(method_loader->next_instruction_address());
+ // Verify state.
+ assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
+}
+#endif // !PRODUCT
--- a/src/hotspot/cpu/aarch64/jvmciCodeInstaller_aarch64.cpp Mon May 14 15:43:00 2018 +0200
+++ b/src/hotspot/cpu/aarch64/jvmciCodeInstaller_aarch64.cpp Mon May 14 12:03:59 2018 +0100
@@ -35,6 +35,9 @@
return pc_offset + NativeCall::instruction_size;
} else if (inst->is_general_jump()) {
return pc_offset + NativeGeneralJump::instruction_size;
+ } else if (NativeInstruction::is_adrp_at((address)inst)) {
+ // adrp; add; blr
+ return pc_offset + 3 * NativeInstruction::instruction_size;
} else {
JVMCI_ERROR_0("unsupported type of instruction for call site");
}
@@ -81,7 +84,8 @@
void CodeInstaller::pd_patch_DataSectionReference(int pc_offset, int data_offset, TRAPS) {
address pc = _instructions->start() + pc_offset;
NativeInstruction* inst = nativeInstruction_at(pc);
- if (inst->is_adr_aligned() || inst->is_ldr_literal()) {
+ if (inst->is_adr_aligned() || inst->is_ldr_literal()
+ || (NativeInstruction::maybe_cpool_ref(pc))) {
address dest = _constants->start() + data_offset;
_instructions->relocate(pc, section_word_Relocation::spec((address) dest, CodeBuffer::SECT_CONSTS));
TRACE_jvmci_3("relocating at " PTR_FORMAT " (+%d) with destination at %d", p2i(pc), pc_offset, data_offset);
@@ -104,6 +108,10 @@
NativeGeneralJump* jump = nativeGeneralJump_at(pc);
jump->set_jump_destination((address) foreign_call_destination);
_instructions->relocate(jump->instruction_address(), runtime_call_Relocation::spec());
+ } else if (NativeInstruction::is_adrp_at((address)inst)) {
+ // adrp; add; blr
+ MacroAssembler::pd_patch_instruction_size((address)inst,
+ (address)foreign_call_destination);
} else {
JVMCI_ERROR("unknown call or jump instruction at " PTR_FORMAT, p2i(pc));
}
--- a/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp Mon May 14 15:43:00 2018 +0200
+++ b/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp Mon May 14 12:03:59 2018 +0100
@@ -36,7 +36,120 @@
#include "c1/c1_Runtime1.hpp"
#endif
-void NativeCall::verify() { ; }
+void NativeCall::verify() {
+ assert(NativeCall::is_call_at((address)this), "unexpected code at call site");
+}
+
+void NativeInstruction::wrote(int offset) {
+ ICache::invalidate_word(addr_at(offset));
+}
+
+void NativeLoadGot::report_and_fail() const {
+ tty->print_cr("Addr: " INTPTR_FORMAT, p2i(instruction_address()));
+ fatal("not an adrp-based GOT load");
+}
+
+void NativeLoadGot::verify() const {
+ assert(is_adrp_at((address)this), "must be adrp");
+}
+
+address NativeLoadGot::got_address() const {
+ return MacroAssembler::target_addr_for_insn((address)this);
+}
+
+intptr_t NativeLoadGot::data() const {
+ return *(intptr_t *) got_address();
+}
+
+address NativePltCall::destination() const {
+ NativeGotJump* jump = nativeGotJump_at(plt_jump());
+ return *(address*)MacroAssembler::target_addr_for_insn((address)jump);
+}
+
+address NativePltCall::plt_entry() const {
+ return MacroAssembler::target_addr_for_insn((address)this);
+}
+
+address NativePltCall::plt_jump() const {
+ address entry = plt_entry();
+ // Virtual PLT code has move instruction first
+ if (((NativeGotJump*)entry)->is_GotJump()) {
+ return entry;
+ } else {
+ return nativeLoadGot_at(entry)->next_instruction_address();
+ }
+}
+
+address NativePltCall::plt_load_got() const {
+ address entry = plt_entry();
+ if (!((NativeGotJump*)entry)->is_GotJump()) {
+ // Virtual PLT code has move instruction first
+ return entry;
+ } else {
+ // Static PLT code has move instruction second (from c2i stub)
+ return nativeGotJump_at(entry)->next_instruction_address();
+ }
+}
+
+address NativePltCall::plt_c2i_stub() const {
+ address entry = plt_load_got();
+ // This method should be called only for static calls which has C2I stub.
+ NativeLoadGot* load = nativeLoadGot_at(entry);
+ return entry;
+}
+
+address NativePltCall::plt_resolve_call() const {
+ NativeGotJump* jump = nativeGotJump_at(plt_jump());
+ address entry = jump->next_instruction_address();
+ if (((NativeGotJump*)entry)->is_GotJump()) {
+ return entry;
+ } else {
+ // c2i stub 2 instructions
+ entry = nativeLoadGot_at(entry)->next_instruction_address();
+ return nativeGotJump_at(entry)->next_instruction_address();
+ }
+}
+
+void NativePltCall::reset_to_plt_resolve_call() {
+ set_destination_mt_safe(plt_resolve_call());
+}
+
+void NativePltCall::set_destination_mt_safe(address dest) {
+ // rewriting the value in the GOT, it should always be aligned
+ NativeGotJump* jump = nativeGotJump_at(plt_jump());
+ address* got = (address *) jump->got_address();
+ *got = dest;
+}
+
+void NativePltCall::set_stub_to_clean() {
+ NativeLoadGot* method_loader = nativeLoadGot_at(plt_c2i_stub());
+ NativeGotJump* jump = nativeGotJump_at(method_loader->next_instruction_address());
+ method_loader->set_data(0);
+ jump->set_jump_destination((address)-1);
+}
+
+void NativePltCall::verify() const {
+ assert(NativeCall::is_call_at((address)this), "unexpected code at call site");
+}
+
+address NativeGotJump::got_address() const {
+ return MacroAssembler::target_addr_for_insn((address)this);
+}
+
+address NativeGotJump::destination() const {
+ address *got_entry = (address *) got_address();
+ return *got_entry;
+}
+
+bool NativeGotJump::is_GotJump() const {
+ NativeInstruction *insn =
+ nativeInstruction_at(addr_at(3 * NativeInstruction::instruction_size));
+ return insn->encoding() == 0xd61f0200; // br x16
+}
+
+void NativeGotJump::verify() const {
+ assert(is_adrp_at((address)this), "must be adrp");
+}
address NativeCall::destination() const {
address addr = (address)this;
@@ -71,6 +184,7 @@
ResourceMark rm;
int code_size = NativeInstruction::instruction_size;
address addr_call = addr_at(0);
+ bool reachable = Assembler::reachable_from_branch_at(addr_call, dest);
assert(NativeCall::is_call_at(addr_call), "unexpected code at call site");
// Patch the constant in the call's trampoline stub.
@@ -81,7 +195,7 @@
}
// Patch the call.
- if (Assembler::reachable_from_branch_at(addr_call, dest)) {
+ if (reachable) {
set_destination(dest);
} else {
assert (trampoline_stub_addr != NULL, "we need a trampoline");
@@ -103,9 +217,11 @@
is_NativeCallTrampolineStub_at(bl_destination))
return bl_destination;
- // If the codeBlob is not a nmethod, this is because we get here from the
- // CodeBlob constructor, which is called within the nmethod constructor.
- return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
+ if (code->is_nmethod()) {
+ return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
+ }
+
+ return NULL;
}
// Inserts a native call instruction at a given pc
@@ -340,9 +456,16 @@
void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
- assert(nativeInstruction_at(verified_entry)->is_jump_or_nop()
- || nativeInstruction_at(verified_entry)->is_sigill_zombie_not_entrant(),
- "Aarch64 cannot replace non-jump with jump");
+
+#ifdef ASSERT
+ // This may be the temporary nmethod generated while we're AOT
+ // compiling. Such an nmethod doesn't begin with a NOP but with an ADRP.
+ if (! (CalculateClassFingerprint && UseAOT && is_adrp_at(verified_entry))) {
+ assert(nativeInstruction_at(verified_entry)->is_jump_or_nop()
+ || nativeInstruction_at(verified_entry)->is_sigill_zombie_not_entrant(),
+ "Aarch64 cannot replace non-jump with jump");
+ }
+#endif
// Patch this nmethod atomically.
if (Assembler::reachable_from_branch_at(verified_entry, dest)) {
--- a/src/hotspot/cpu/aarch64/nativeInst_aarch64.hpp Mon May 14 15:43:00 2018 +0200
+++ b/src/hotspot/cpu/aarch64/nativeInst_aarch64.hpp Mon May 14 12:03:59 2018 +0100
@@ -1,6 +1,6 @@
/*
* Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
- * Copyright (c) 2014, Red Hat Inc. All rights reserved.
+ * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -95,6 +95,8 @@
void set_ptr_at (int offset, address ptr) { *(address*) addr_at(offset) = ptr; }
void set_oop_at (int offset, oop o) { *(oop*) addr_at(offset) = o; }
+ void wrote(int offset);
+
public:
// unit test stuff
@@ -148,6 +150,46 @@
return (NativeInstruction*)address;
}
+class NativePltCall: public NativeInstruction {
+public:
+ enum Arm_specific_constants {
+ instruction_size = 4,
+ instruction_offset = 0,
+ displacement_offset = 1,
+ return_address_offset = 4
+ };
+ address instruction_address() const { return addr_at(instruction_offset); }
+ address next_instruction_address() const { return addr_at(return_address_offset); }
+ address displacement_address() const { return addr_at(displacement_offset); }
+ int displacement() const { return (jint) int_at(displacement_offset); }
+ address return_address() const { return addr_at(return_address_offset); }
+ address destination() const;
+ address plt_entry() const;
+ address plt_jump() const;
+ address plt_load_got() const;
+ address plt_resolve_call() const;
+ address plt_c2i_stub() const;
+ void set_stub_to_clean();
+
+ void reset_to_plt_resolve_call();
+ void set_destination_mt_safe(address dest);
+
+ void verify() const;
+};
+
+inline NativePltCall* nativePltCall_at(address address) {
+ NativePltCall* call = (NativePltCall*) address;
+#ifdef ASSERT
+ call->verify();
+#endif
+ return call;
+}
+
+inline NativePltCall* nativePltCall_before(address addr) {
+ address at = addr - NativePltCall::instruction_size;
+ return nativePltCall_at(at);
+}
+
inline NativeCall* nativeCall_at(address address);
// The NativeCall is an abstraction for accessing/manipulating native
// call instructions (used to manipulate inline caches, primitive &
@@ -169,7 +211,7 @@
address return_address() const { return addr_at(return_address_offset); }
address destination() const;
- void set_destination(address dest) {
+ void set_destination(address dest) {
int offset = dest - instruction_address();
unsigned int insn = 0b100101 << 26;
assert((offset & 3) == 0, "should be");
@@ -191,6 +233,12 @@
return is_call_at(return_address - NativeCall::return_address_offset);
}
+#if INCLUDE_AOT
+ static bool is_far_call(address instr, address target) {
+ return !Assembler::reachable_from_branch_at(instr, target);
+ }
+#endif
+
// MT-safe patching of a call instruction.
static void insert(address code_pos, address entry);
@@ -381,6 +429,39 @@
static void test() {}
};
+// adrp x16, #page
+// add x16, x16, #offset
+// ldr x16, [x16]
+class NativeLoadGot: public NativeInstruction {
+public:
+ enum AArch64_specific_constants {
+ instruction_length = 4 * NativeInstruction::instruction_size,
+ offset_offset = 0,
+ };
+
+ address instruction_address() const { return addr_at(0); }
+ address return_address() const { return addr_at(instruction_length); }
+ address got_address() const;
+ address next_instruction_address() const { return return_address(); }
+ intptr_t data() const;
+ void set_data(intptr_t data) {
+ intptr_t *addr = (intptr_t *) got_address();
+ *addr = data;
+ }
+
+ void verify() const;
+private:
+ void report_and_fail() const;
+};
+
+inline NativeLoadGot* nativeLoadGot_at(address addr) {
+ NativeLoadGot* load = (NativeLoadGot*) addr;
+#ifdef ASSERT
+ load->verify();
+#endif
+ return load;
+}
+
class NativeJump: public NativeInstruction {
public:
enum AArch64_specific_constants {
@@ -441,6 +522,31 @@
return jump;
}
+class NativeGotJump: public NativeInstruction {
+public:
+ enum AArch64_specific_constants {
+ instruction_size = 4 * NativeInstruction::instruction_size,
+ };
+
+ void verify() const;
+ address instruction_address() const { return addr_at(0); }
+ address destination() const;
+ address return_address() const { return addr_at(instruction_size); }
+ address got_address() const;
+ address next_instruction_address() const { return addr_at(instruction_size); }
+ bool is_GotJump() const;
+
+ void set_jump_destination(address dest) {
+ address* got = (address *)got_address();
+ *got = dest;
+ }
+};
+
+inline NativeGotJump* nativeGotJump_at(address addr) {
+ NativeGotJump* jump = (NativeGotJump*)(addr);
+ return jump;
+}
+
class NativePopReg : public NativeInstruction {
public:
// Insert a pop instruction
--- a/src/hotspot/cpu/aarch64/sharedRuntime_aarch64.cpp Mon May 14 15:43:00 2018 +0200
+++ b/src/hotspot/cpu/aarch64/sharedRuntime_aarch64.cpp Mon May 14 12:03:59 2018 +0100
@@ -1,6 +1,6 @@
/*
* Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
- * Copyright (c) 2014, 2015, Red Hat Inc. All rights reserved.
+ * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -560,7 +560,7 @@
__ ldr(rscratch1, Address(rmethod, in_bytes(Method::from_compiled_offset())));
#if INCLUDE_JVMCI
- if (EnableJVMCI) {
+ if (EnableJVMCI || UseAOT) {
// check if this call should be routed towards a specific entry point
__ ldr(rscratch2, Address(rthread, in_bytes(JavaThread::jvmci_alternate_call_target_offset())));
Label no_alternative_target;
@@ -2278,7 +2278,7 @@
// Setup code generation tools
int pad = 0;
#if INCLUDE_JVMCI
- if (EnableJVMCI) {
+ if (EnableJVMCI || UseAOT) {
pad += 512; // Increase the buffer size when compiling for JVMCI
}
#endif
@@ -2360,7 +2360,7 @@
int implicit_exception_uncommon_trap_offset = 0;
int uncommon_trap_offset = 0;
- if (EnableJVMCI) {
+ if (EnableJVMCI || UseAOT) {
implicit_exception_uncommon_trap_offset = __ pc() - start;
__ ldr(lr, Address(rthread, in_bytes(JavaThread::jvmci_implicit_exception_pc_offset())));
@@ -2486,7 +2486,7 @@
__ reset_last_Java_frame(false);
#if INCLUDE_JVMCI
- if (EnableJVMCI) {
+ if (EnableJVMCI || UseAOT) {
__ bind(after_fetch_unroll_info_call);
}
#endif
@@ -2644,7 +2644,7 @@
_deopt_blob = DeoptimizationBlob::create(&buffer, oop_maps, 0, exception_offset, reexecute_offset, frame_size_in_words);
_deopt_blob->set_unpack_with_exception_in_tls_offset(exception_in_tls_offset);
#if INCLUDE_JVMCI
- if (EnableJVMCI) {
+ if (EnableJVMCI || UseAOT) {
_deopt_blob->set_uncommon_trap_offset(uncommon_trap_offset);
_deopt_blob->set_implicit_exception_uncommon_trap_offset(implicit_exception_uncommon_trap_offset);
}
--- a/src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp Mon May 14 15:43:00 2018 +0200
+++ b/src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp Mon May 14 12:03:59 2018 +0100
@@ -1,6 +1,6 @@
/*
* Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
- * Copyright (c) 2014, Red Hat Inc. All rights reserved.
+ * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -484,7 +484,7 @@
#if INCLUDE_JVMCI
// Check if we need to take lock at entry of synchronized method. This can
// only occur on method entry so emit it only for vtos with step 0.
- if (EnableJVMCI && state == vtos && step == 0) {
+ if ((EnableJVMCI || UseAOT) && state == vtos && step == 0) {
Label L;
__ ldr(rscratch1, Address(rthread, Thread::pending_exception_offset()));
__ cbz(rscratch1, L);
--- a/src/hotspot/share/asm/codeBuffer.hpp Mon May 14 15:43:00 2018 +0200
+++ b/src/hotspot/share/asm/codeBuffer.hpp Mon May 14 12:03:59 2018 +0100
@@ -382,6 +382,10 @@
address _last_insn; // used to merge consecutive memory barriers, loads or stores.
+#if INCLUDE_AOT
+ bool _immutable_PIC;
+#endif
+
address _decode_begin; // start address for decode
address decode_begin();
@@ -396,6 +400,9 @@
_overflow_arena = NULL;
_code_strings = CodeStrings();
_last_insn = NULL;
+#if INCLUDE_AOT
+ _immutable_PIC = false;
+#endif
}
void initialize(address code_start, csize_t code_size) {
@@ -629,6 +636,13 @@
// Log a little info about section usage in the CodeBuffer
void log_section_sizes(const char* name);
+#if INCLUDE_AOT
+ // True if this is a code buffer used for immutable PIC, i.e. AOT
+ // compilation.
+ bool immutable_PIC() { return _immutable_PIC; }
+ void set_immutable_PIC(bool pic) { _immutable_PIC = pic; }
+#endif
+
#ifndef PRODUCT
public:
// Printing / Decoding
--- a/src/hotspot/share/jvmci/jvmciCodeInstaller.cpp Mon May 14 15:43:00 2018 +0200
+++ b/src/hotspot/share/jvmci/jvmciCodeInstaller.cpp Mon May 14 12:03:59 2018 +0100
@@ -591,6 +591,9 @@
// Get instructions and constants CodeSections early because we need it.
_instructions = buffer.insts();
_constants = buffer.consts();
+#if INCLUDE_AOT
+ buffer.set_immutable_PIC(_immutable_pic_compilation);
+#endif
initialize_fields(target(), JNIHandles::resolve(compiled_code_obj), CHECK_OK);
JVMCIEnv::CodeInstallResult result = initialize_buffer(buffer, false, CHECK_OK);
@@ -624,6 +627,9 @@
// Get instructions and constants CodeSections early because we need it.
_instructions = buffer.insts();
_constants = buffer.consts();
+#if INCLUDE_AOT
+ buffer.set_immutable_PIC(_immutable_pic_compilation);
+#endif
initialize_fields(target(), JNIHandles::resolve(compiled_code_obj), CHECK_OK);
JVMCIEnv::CodeInstallResult result = initialize_buffer(buffer, true, CHECK_OK);
--- a/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/BinaryContainer.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/BinaryContainer.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -528,7 +528,7 @@
switch (osName) {
case "Linux":
case "SunOS":
- JELFRelocObject elfobj = new JELFRelocObject(this, outputFileName);
+ JELFRelocObject elfobj = JELFRelocObject.newInstance(this, outputFileName);
elfobj.createELFRelocObject(relocationTable, symbolTable.values());
break;
case "Mac OS X":
@@ -576,7 +576,7 @@
* @param info relocation information to be added
*/
public void addRelocation(Relocation info) {
- // System.out.println("# Relocation [" + symName + "] [" + info.getOffset() + "] [" +
+ // System.out.println("# Relocation [" + info.getSymbol() + "] [" + info.getOffset() + "] [" +
// info.getSection().getContainerName() + "] [" + info.getSymbol().getName() + "] [" +
// info.getSymbol().getOffset() + " @ " + info.getSymbol().getSection().getContainerName() +
// "]");
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/elf/AArch64JELFRelocObject.java Mon May 14 12:03:59 2018 +0100
@@ -0,0 +1,134 @@
+/*
+ * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2018, Red Hat Inc. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package jdk.tools.jaotc.binformat.elf;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import jdk.tools.jaotc.binformat.BinaryContainer;
+import jdk.tools.jaotc.binformat.ByteContainer;
+import jdk.tools.jaotc.binformat.CodeContainer;
+import jdk.tools.jaotc.binformat.ReadOnlyDataContainer;
+import jdk.tools.jaotc.binformat.Relocation;
+import jdk.tools.jaotc.binformat.Relocation.RelocType;
+import jdk.tools.jaotc.binformat.Symbol;
+import jdk.tools.jaotc.binformat.Symbol.Binding;
+import jdk.tools.jaotc.binformat.Symbol.Kind;
+
+import jdk.tools.jaotc.binformat.elf.ElfSymbol;
+import jdk.tools.jaotc.binformat.elf.ElfTargetInfo;
+import jdk.tools.jaotc.binformat.elf.Elf.Elf64_Ehdr;
+import jdk.tools.jaotc.binformat.elf.Elf.Elf64_Shdr;
+import jdk.tools.jaotc.binformat.elf.Elf.Elf64_Sym;
+import jdk.tools.jaotc.binformat.elf.Elf.Elf64_Rela;
+
+
+public class AArch64JELFRelocObject extends JELFRelocObject {
+
+ AArch64JELFRelocObject(BinaryContainer binContainer, String outputFileName) {
+ super(binContainer, outputFileName);
+ }
+
+ void createRelocation(Symbol symbol, Relocation reloc, ElfRelocTable elfRelocTable) {
+ RelocType relocType = reloc.getType();
+
+ int elfRelocType = getELFRelocationType(relocType);
+ ElfSymbol sym = (ElfSymbol) symbol.getNativeSymbol();
+ int symno = sym.getIndex();
+ int sectindex = reloc.getSection().getSectionId();
+ int offset = reloc.getOffset();
+ int addend = 0;
+
+ switch (relocType) {
+ case STUB_CALL_DIRECT:
+ case JAVA_CALL_DIRECT: {
+ break;
+ }
+ case EXTERNAL_PLT_TO_GOT:
+ offset -= 16;
+ elfRelocTable.createRelocationEntry(sectindex, offset, symno, Elf64_Rela.R_AARCH64_ADR_PREL_PG_HI21, addend);
+ elfRelocTable.createRelocationEntry(sectindex, offset + 4, symno, Elf64_Rela.R_AARCH64_ADD_ABS_LO12_NC, addend);
+ return;
+
+ case FOREIGN_CALL_INDIRECT_GOT: {
+ break;
+ }
+ case METASPACE_GOT_REFERENCE: {
+ offset -= 4;
+
+ elfRelocTable.createRelocationEntry(sectindex, offset, symno, Elf64_Rela.R_AARCH64_ADR_PREL_PG_HI21, addend);
+ elfRelocTable.createRelocationEntry(sectindex, offset + 4, symno, Elf64_Rela.R_AARCH64_ADD_ABS_LO12_NC, addend);
+ return;
+ }
+ // break;
+ case JAVA_CALL_INDIRECT: {
+ addend = -4;
+ offset = offset + addend;
+ break;
+ }
+ case EXTERNAL_GOT_TO_PLT: {
+ // this is load time relocations
+ break;
+ }
+ default:
+ throw new InternalError("Unhandled relocation type: " + relocType);
+ }
+
+ elfRelocTable.createRelocationEntry(sectindex, offset, symno, elfRelocType, addend);
+ }
+
+ int getELFRelocationType(RelocType relocType) {
+ int elfRelocType = 0; // R_<ARCH>_NONE if #define'd to 0 for all values of ARCH
+ switch (ElfTargetInfo.getElfArch()) {
+ case Elf64_Ehdr.EM_AARCH64:
+ // Return R_AARCH64_* entries based on relocType
+ if (relocType == RelocType.JAVA_CALL_DIRECT ||
+ relocType == RelocType.FOREIGN_CALL_INDIRECT_GOT) {
+ elfRelocType = Elf64_Rela.R_AARCH64_CALL26;
+ } else if (relocType == RelocType.STUB_CALL_DIRECT) {
+ elfRelocType = Elf64_Rela.R_AARCH64_CALL26;
+ } else if (relocType == RelocType.JAVA_CALL_INDIRECT) {
+ elfRelocType = Elf64_Rela.R_AARCH64_CALL26;
+ } else if (relocType == RelocType.METASPACE_GOT_REFERENCE ||
+ relocType == RelocType.EXTERNAL_PLT_TO_GOT) {
+ elfRelocType = Elf64_Rela.R_AARCH64_NONE;
+ } else if (relocType == RelocType.EXTERNAL_GOT_TO_PLT) {
+ elfRelocType = Elf64_Rela.R_AARCH64_ABS64;
+ } else {
+ assert false : "Unhandled relocation type: " + relocType;
+ }
+ break;
+
+ default:
+ System.out.println("Relocation Type mapping: Unhandled architecture: "
+ + ElfTargetInfo.getElfArch());
+ }
+ return elfRelocType;
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/elf/AMD64JELFRelocObject.java Mon May 14 12:03:59 2018 +0100
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package jdk.tools.jaotc.binformat.elf;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import jdk.tools.jaotc.binformat.BinaryContainer;
+import jdk.tools.jaotc.binformat.ByteContainer;
+import jdk.tools.jaotc.binformat.CodeContainer;
+import jdk.tools.jaotc.binformat.ReadOnlyDataContainer;
+import jdk.tools.jaotc.binformat.Relocation;
+import jdk.tools.jaotc.binformat.Relocation.RelocType;
+import jdk.tools.jaotc.binformat.Symbol;
+import jdk.tools.jaotc.binformat.Symbol.Binding;
+import jdk.tools.jaotc.binformat.Symbol.Kind;
+
+import jdk.tools.jaotc.binformat.elf.ElfSymbol;
+import jdk.tools.jaotc.binformat.elf.ElfTargetInfo;
+import jdk.tools.jaotc.binformat.elf.Elf.Elf64_Ehdr;
+import jdk.tools.jaotc.binformat.elf.Elf.Elf64_Shdr;
+import jdk.tools.jaotc.binformat.elf.Elf.Elf64_Sym;
+import jdk.tools.jaotc.binformat.elf.Elf.Elf64_Rela;
+
+
+public class AMD64JELFRelocObject extends JELFRelocObject {
+
+ AMD64JELFRelocObject(BinaryContainer binContainer, String outputFileName) {
+ super(binContainer, outputFileName);
+ }
+
+ protected void createRelocation(Symbol symbol, Relocation reloc, ElfRelocTable elfRelocTable) {
+ RelocType relocType = reloc.getType();
+
+ int elfRelocType = getELFRelocationType(relocType);
+ ElfSymbol sym = (ElfSymbol) symbol.getNativeSymbol();
+ int symno = sym.getIndex();
+ int sectindex = reloc.getSection().getSectionId();
+ int offset = reloc.getOffset();
+ int addend = 0;
+
+ switch (relocType) {
+ case JAVA_CALL_DIRECT:
+ case STUB_CALL_DIRECT:
+ case FOREIGN_CALL_INDIRECT_GOT: {
+ // Create relocation entry
+ addend = -4; // Size in bytes of the patch location
+ // Relocation should be applied at the location after call operand
+ offset = offset + reloc.getSize() + addend;
+ break;
+ }
+ case JAVA_CALL_INDIRECT:
+ case METASPACE_GOT_REFERENCE:
+ case EXTERNAL_PLT_TO_GOT: {
+ addend = -4; // Size of 32-bit address of the GOT
+ /*
+ * Relocation should be applied before the test instruction to the move instruction.
+ * reloc.getOffset() points to the test instruction after the instruction that loads the address of
+ * polling page. So set the offset appropriately.
+ */
+ offset = offset + addend;
+ break;
+ }
+ case EXTERNAL_GOT_TO_PLT: {
+ // this is load time relocations
+ break;
+ }
+ default:
+ throw new InternalError("Unhandled relocation type: " + relocType);
+ }
+ elfRelocTable.createRelocationEntry(sectindex, offset, symno, elfRelocType, addend);
+ }
+
+ private int getELFRelocationType(RelocType relocType) {
+ int elfRelocType = 0; // R_<ARCH>_NONE if #define'd to 0 for all values of ARCH
+ switch (ElfTargetInfo.getElfArch()) {
+ case Elf64_Ehdr.EM_X86_64:
+ // Return R_X86_64_* entries based on relocType
+ if (relocType == RelocType.JAVA_CALL_DIRECT ||
+ relocType == RelocType.FOREIGN_CALL_INDIRECT_GOT) {
+ elfRelocType = Elf64_Rela.R_X86_64_PLT32;
+ } else if (relocType == RelocType.STUB_CALL_DIRECT) {
+ elfRelocType = Elf64_Rela.R_X86_64_PC32;
+ } else if (relocType == RelocType.JAVA_CALL_INDIRECT) {
+ elfRelocType = Elf64_Rela.R_X86_64_NONE;
+ } else if (relocType == RelocType.METASPACE_GOT_REFERENCE ||
+ relocType == RelocType.EXTERNAL_PLT_TO_GOT) {
+ elfRelocType = Elf64_Rela.R_X86_64_PC32;
+ } else if (relocType == RelocType.EXTERNAL_GOT_TO_PLT) {
+ elfRelocType = Elf64_Rela.R_X86_64_64;
+ } else {
+ assert false : "Unhandled relocation type: " + relocType;
+ }
+ break;
+
+ default:
+ System.out.println("Relocation Type mapping: Unhandled architecture: "
+ + ElfTargetInfo.getElfArch());
+ }
+ return elfRelocType;
+ }
+}
--- a/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/elf/Elf.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/elf/Elf.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,6 @@
/*
- * Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2018, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -208,12 +209,26 @@
/**
* Relocation types
*/
+
static final int R_X86_64_NONE = 0x0;
static final int R_X86_64_64 = 0x1;
static final int R_X86_64_PC32 = 0x2;
static final int R_X86_64_PLT32 = 0x4;
static final int R_X86_64_GOTPCREL = 0x9;
+ static final int R_AARCH64_NONE = 256;
+ static final int R_AARCH64_ABS64 = 257;
+ static final int R_AARCH64_CALL26 = 283;
+ static final int R_AARCH64_ADR_GOT_PAGE = 311;
+ static final int R_AARCH64_LD64_GOT_LO12_NC = 312;
+
+ static final int R_AARCH64_MOVW_UABS_G0_NC = 264;
+ static final int R_AARCH64_MOVW_UABS_G1_NC = 266;
+ static final int R_AARCH64_MOVW_UABS_G2_NC = 268;
+
+ static final int R_AARCH64_ADR_PREL_PG_HI21 = 275;
+ static final int R_AARCH64_ADD_ABS_LO12_NC = 277;
+ static final int R_AARCH64_LDST64_ABS_LO12_NC = 286;
}
/**
@@ -240,6 +255,20 @@
static final int R_X86_64_PLT32 = 0x4;
static final int R_X86_64_GOTPCREL = 0x9;
+ static final int R_AARCH64_NONE = 256;
+ static final int R_AARCH64_ABS64 = 257;
+ static final int R_AARCH64_CALL26 = 283;
+ static final int R_AARCH64_ADR_GOT_PAGE = 311;
+ static final int R_AARCH64_LD64_GOT_LO12_NC = 312;
+
+ static final int R_AARCH64_MOVW_UABS_G0_NC = 264;
+ static final int R_AARCH64_MOVW_UABS_G1_NC = 266;
+ static final int R_AARCH64_MOVW_UABS_G2_NC = 268;
+
+ static final int R_AARCH64_ADR_PREL_PG_HI21 = 275;
+ static final int R_AARCH64_ADD_ABS_LO12_NC = 277;
+ static final int R_AARCH64_LDST64_ABS_LO12_NC = 286;
+
static long ELF64_R_INFO(int symidx, int type) {
return (((long)symidx << 32) + type);
}
--- a/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/elf/ElfTargetInfo.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/elf/ElfTargetInfo.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -55,6 +55,8 @@
if (archStr.equals("amd64") || archStr.equals("x86_64")) {
arch = Elf64_Ehdr.EM_X86_64;
+ } else if (archStr.equals("aarch64")) {
+ arch = Elf64_Ehdr.EM_AARCH64;
} else {
System.out.println("Unsupported architecture " + archStr);
arch = Elf64_Ehdr.EM_NONE;
--- a/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/elf/JELFRelocObject.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc.binformat/src/jdk/tools/jaotc/binformat/elf/JELFRelocObject.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -46,7 +46,7 @@
import jdk.tools.jaotc.binformat.elf.Elf.Elf64_Sym;
import jdk.tools.jaotc.binformat.elf.Elf.Elf64_Rela;
-public class JELFRelocObject {
+public abstract class JELFRelocObject {
private final BinaryContainer binContainer;
@@ -54,12 +54,22 @@
private final int segmentSize;
- public JELFRelocObject(BinaryContainer binContainer, String outputFileName) {
+ protected JELFRelocObject(BinaryContainer binContainer, String outputFileName) {
this.binContainer = binContainer;
this.elfContainer = new ElfContainer(outputFileName);
this.segmentSize = binContainer.getCodeSegmentSize();
}
+ public static JELFRelocObject newInstance(BinaryContainer binContainer, String outputFileName) {
+ String archStr = System.getProperty("os.arch").toLowerCase();
+ if (archStr.equals("amd64") || archStr.equals("x86_64")) {
+ return new AMD64JELFRelocObject(binContainer, outputFileName);
+ } else if (archStr.equals("aarch64")) {
+ return new AArch64JELFRelocObject(binContainer, outputFileName);
+ }
+ throw new InternalError("Unsupported platform: " + archStr);
+ }
+
private static ElfSection createByteSection(ArrayList<ElfSection> sections,
String sectName,
byte[] scnData,
@@ -295,75 +305,6 @@
return (elfRelocTable);
}
- private static void createRelocation(Symbol symbol, Relocation reloc, ElfRelocTable elfRelocTable) {
- RelocType relocType = reloc.getType();
-
- int elfRelocType = getELFRelocationType(relocType);
- ElfSymbol sym = (ElfSymbol) symbol.getNativeSymbol();
- int symno = sym.getIndex();
- int sectindex = reloc.getSection().getSectionId();
- int offset = reloc.getOffset();
- int addend = 0;
-
- switch (relocType) {
- case JAVA_CALL_DIRECT:
- case STUB_CALL_DIRECT:
- case FOREIGN_CALL_INDIRECT_GOT: {
- // Create relocation entry
- addend = -4; // Size in bytes of the patch location
- // Relocation should be applied at the location after call operand
- offset = offset + reloc.getSize() + addend;
- break;
- }
- case JAVA_CALL_INDIRECT:
- case METASPACE_GOT_REFERENCE:
- case EXTERNAL_PLT_TO_GOT: {
- addend = -4; // Size of 32-bit address of the GOT
- /*
- * Relocation should be applied before the test instruction to the move instruction.
- * reloc.getOffset() points to the test instruction after the instruction that loads the address of
- * polling page. So set the offset appropriately.
- */
- offset = offset + addend;
- break;
- }
- case EXTERNAL_GOT_TO_PLT: {
- // this is load time relocations
- break;
- }
- default:
- throw new InternalError("Unhandled relocation type: " + relocType);
- }
- elfRelocTable.createRelocationEntry(sectindex, offset, symno, elfRelocType, addend);
- }
-
- private static int getELFRelocationType(RelocType relocType) {
- int elfRelocType = 0; // R_<ARCH>_NONE if #define'd to 0 for all values of ARCH
- switch (ElfTargetInfo.getElfArch()) {
- case Elf64_Ehdr.EM_X86_64:
- // Return R_X86_64_* entries based on relocType
- if (relocType == RelocType.JAVA_CALL_DIRECT ||
- relocType == RelocType.FOREIGN_CALL_INDIRECT_GOT) {
- elfRelocType = Elf64_Rela.R_X86_64_PLT32;
- } else if (relocType == RelocType.STUB_CALL_DIRECT) {
- elfRelocType = Elf64_Rela.R_X86_64_PC32;
- } else if (relocType == RelocType.JAVA_CALL_INDIRECT) {
- elfRelocType = Elf64_Rela.R_X86_64_NONE;
- } else if (relocType == RelocType.METASPACE_GOT_REFERENCE ||
- relocType == RelocType.EXTERNAL_PLT_TO_GOT) {
- elfRelocType = Elf64_Rela.R_X86_64_PC32;
- } else if (relocType == RelocType.EXTERNAL_GOT_TO_PLT) {
- elfRelocType = Elf64_Rela.R_X86_64_64;
- } else {
- assert false : "Unhandled relocation type: " + relocType;
- }
- break;
- default:
- System.out.println("Relocation Type mapping: Unhandled architecture");
- }
- return elfRelocType;
- }
-
private static void createElfRelocSections(ArrayList<ElfSection> sections,
ElfRelocTable elfRelocTable,
int symtabsectidx) {
@@ -383,4 +324,7 @@
}
}
}
+
+ abstract void createRelocation(Symbol symbol, Relocation reloc, ElfRelocTable elfRelocTable);
+
}
--- a/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/CodeSectionProcessor.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/CodeSectionProcessor.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -32,6 +32,8 @@
import org.graalvm.compiler.code.CompilationResult;
import org.graalvm.compiler.hotspot.HotSpotForeignCallLinkage;
+import jdk.vm.ci.aarch64.AArch64;
+import jdk.vm.ci.amd64.AMD64;
import jdk.vm.ci.code.TargetDescription;
import jdk.vm.ci.code.site.Call;
import jdk.vm.ci.code.site.Infopoint;
@@ -72,7 +74,8 @@
for (Infopoint infopoint : compResult.getInfopoints()) {
if (infopoint.reason == InfopointReason.CALL) {
final Call callInfopoint = (Call) infopoint;
- if (callInfopoint.target instanceof HotSpotForeignCallLinkage) {
+ if (callInfopoint.target instanceof HotSpotForeignCallLinkage &&
+ target.arch instanceof AMD64) {
// TODO 4 is x86 size of relative displacement.
// For SPARC need something different.
int destOffset = infopoint.pcOffset + callInfopoint.size - 4;
--- a/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/CompiledMethodInfo.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/CompiledMethodInfo.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -39,6 +39,8 @@
final class CompiledMethodInfo {
+ static final String archStr = System.getProperty("os.arch").toLowerCase();
+
private static final int UNINITIALIZED_OFFSET = -1;
private static class AOTMethodOffsets {
@@ -304,10 +306,17 @@
boolean hasMark(Site call, MarkId id) {
for (Mark m : compilationResult.getMarks()) {
- // TODO: X64-specific code.
- // Call instructions are aligned to 8
- // bytes - 1 on x86 to patch address atomically,
- int adjOffset = (m.pcOffset & (-8)) + 7;
+ int adjOffset = m.pcOffset;
+ if (archStr.equals("aarch64")) {
+ // The mark is at the end of a group of three instructions:
+ // adrp; add; ldr
+ adjOffset += 12;
+ } else {
+ // X64-specific code.
+ // Call instructions are aligned to 8
+ // bytes - 1 on x86 to patch address atomically,
+ adjOffset = (adjOffset & (-8)) + 7;
+ }
// Mark points before aligning nops.
if ((call.pcOffset == adjOffset) && MarkId.getEnum((int) m.id) == id) {
return true;
--- a/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/ELFMacroAssembler.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/ELFMacroAssembler.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -25,8 +25,10 @@
import jdk.tools.jaotc.StubInformation;
import jdk.tools.jaotc.amd64.AMD64ELFMacroAssembler;
+import jdk.tools.jaotc.aarch64.AArch64ELFMacroAssembler;
import jdk.vm.ci.amd64.AMD64;
+import jdk.vm.ci.aarch64.AArch64;
import jdk.vm.ci.code.Architecture;
import jdk.vm.ci.code.TargetDescription;
@@ -36,6 +38,8 @@
Architecture architecture = target.arch;
if (architecture instanceof AMD64) {
return new AMD64ELFMacroAssembler(target);
+ } else if (architecture instanceof AArch64) {
+ return new AArch64ELFMacroAssembler(target);
} else {
throw new InternalError("Unsupported architecture " + architecture);
}
--- a/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/InstructionDecoder.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/InstructionDecoder.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -24,8 +24,10 @@
package jdk.tools.jaotc;
import jdk.tools.jaotc.amd64.AMD64InstructionDecoder;
+import jdk.tools.jaotc.aarch64.AArch64InstructionDecoder;
import jdk.vm.ci.amd64.AMD64;
+import jdk.vm.ci.aarch64.AArch64;
import jdk.vm.ci.code.Architecture;
import jdk.vm.ci.code.TargetDescription;
@@ -35,6 +37,8 @@
Architecture architecture = target.arch;
if (architecture instanceof AMD64) {
return new AMD64InstructionDecoder(target);
+ } else if (architecture instanceof AArch64) {
+ return new AArch64InstructionDecoder(target);
} else {
throw new InternalError("Unsupported architecture " + architecture);
}
--- a/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/JavaCallSiteRelocationSymbol.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/JavaCallSiteRelocationSymbol.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -38,7 +38,19 @@
private static final byte[] zeroSlot = new byte[8];
// -1 represents Universe::non_oop_word() value
- private static final byte[] minusOneSlot = {-1, -1, -1, -1, -1, -1, -1, -1};
+ private static final byte[] minusOneSlot;
+
+ static {
+ String archStr = System.getProperty("os.arch").toLowerCase();
+ if (archStr.equals("aarch64")) {
+ // AArch64 is a special case: it uses 48-bit addresses.
+ byte[] non_oop_word = {-1, -1, -1, -1, -1, -1, 0, 0};
+ minusOneSlot = non_oop_word;
+ } else {
+ byte[] non_oop_word = {-1, -1, -1, -1, -1, -1, -1, -1};
+ minusOneSlot = non_oop_word;
+ }
+ }
JavaCallSiteRelocationSymbol(CompiledMethodInfo mi, Call call, CallSiteRelocationInfo callSiteRelocation, BinaryContainer binaryContainer) {
super(createPltEntrySymbol(binaryContainer, mi, call, callSiteRelocation));
@@ -123,6 +135,7 @@
*/
private static String getResolveSymbolName(CompiledMethodInfo mi, Call call) {
String resolveSymbolName;
+ String name = call.target.toString();
if (CallInfo.isStaticCall(call)) {
assert mi.hasMark(call, MarkId.INVOKESTATIC);
resolveSymbolName = BinaryContainer.getResolveStaticEntrySymbolName();
--- a/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/Linker.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/Linker.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2017, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -69,6 +69,7 @@
if (name.endsWith(".so")) {
objectFileName = name.substring(0, name.length() - ".so".length());
}
+ objectFileName = objectFileName + ".o";
linkerPath = (options.linkerpath != null) ? options.linkerpath : "ld";
linkerCmd = linkerPath + " -shared -z noexecstack -o " + libraryFileName + " " + objectFileName;
linkerCheck = linkerPath + " -v";
@@ -130,7 +131,8 @@
throw new InternalError(errorMessage);
}
File objFile = new File(objectFileName);
- if (objFile.exists()) {
+ boolean keepObjFile = Boolean.parseBoolean(System.getProperty("aot.keep.objFile", "false"));
+ if (objFile.exists() && !keepObjFile) {
if (!objFile.delete()) {
throw new InternalError("Failed to delete " + objectFileName + " file");
}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/aarch64/AArch64ELFMacroAssembler.java Mon May 14 12:03:59 2018 +0100
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2018, Red Hat Inc. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package jdk.tools.jaotc.aarch64;
+
+import jdk.tools.jaotc.StubInformation;
+import jdk.tools.jaotc.ELFMacroAssembler;
+
+import org.graalvm.compiler.asm.aarch64.AArch64Address;
+import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler;
+
+
+import jdk.vm.ci.code.TargetDescription;
+import jdk.vm.ci.code.Register;
+
+import static jdk.vm.ci.aarch64.AArch64.*;
+
+public final class AArch64ELFMacroAssembler extends AArch64MacroAssembler implements ELFMacroAssembler {
+
+ private int currentEndOfInstruction;
+
+ public AArch64ELFMacroAssembler(TargetDescription target) {
+ super(target);
+ }
+
+ @Override
+ public int currentEndOfInstruction() {
+ return currentEndOfInstruction;
+ }
+
+ @Override
+ public byte[] getPLTJumpCode() {
+ // The main dispatch instruction
+ addressOf(r16);
+ ldr(64, r16, AArch64Address.createBaseRegisterOnlyAddress(r16));
+ jmp(r16);
+
+ currentEndOfInstruction = position();
+
+ align(8);
+
+ return close(true);
+ }
+
+ @Override
+ public byte[] getPLTStaticEntryCode(StubInformation stub) {
+ // The main dispatch instruction
+ addressOf(r16);
+ ldr(64, r16, AArch64Address.createBaseRegisterOnlyAddress(r16));
+ jmp(r16);
+ stub.setDispatchJumpOffset(position());
+
+ // C2I stub used to call interpreter. First load r12
+ // (i.e. rmethod) with a pointer to the Method structure ...
+ addressOf(r12);
+ ldr(64, r12, AArch64Address.createBaseRegisterOnlyAddress(r12));
+ nop();
+ stub.setMovOffset(position());
+
+ // ... then jump to the interpreter.
+ addressOf(r16);
+ ldr(64, r16, AArch64Address.createBaseRegisterOnlyAddress(r16));
+ jmp(r16);
+ stub.setC2IJumpOffset(position());
+
+ // Call to VM runtime to resolve the call.
+ stub.setResolveJumpStart(position());
+ addressOf(r16);
+ ldr(64, r16, AArch64Address.createBaseRegisterOnlyAddress(r16));
+ jmp(r16);
+ stub.setResolveJumpOffset(position());
+ currentEndOfInstruction = position();
+
+ align(8);
+ stub.setSize(position());
+
+ return close(true);
+ }
+
+ @Override
+ public byte[] getPLTVirtualEntryCode(StubInformation stub) {
+ // Fixup an inline cache.
+ // Load r9 with a pointer to the Klass.
+ addressOf(r17);
+ ldr(64, r9, AArch64Address.createBaseRegisterOnlyAddress(r17));
+ nop();
+ stub.setMovOffset(position());
+
+ // Jump to the method.
+ addressOf(r16);
+ ldr(64, r16, AArch64Address.createBaseRegisterOnlyAddress(r16));
+ jmp(r16);
+ stub.setDispatchJumpOffset(position());
+
+ // Call to VM runtime to resolve the call.
+ stub.setResolveJumpStart(position());
+ addressOf(r16);
+ ldr(64, r16, AArch64Address.createBaseRegisterOnlyAddress(r16));
+ jmp(r16);
+ stub.setResolveJumpOffset(position());
+ currentEndOfInstruction = position();
+
+ align(8);
+ stub.setSize(position());
+
+ return close(true);
+ }
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/jdk.aot/share/classes/jdk.tools.jaotc/src/jdk/tools/jaotc/aarch64/AArch64InstructionDecoder.java Mon May 14 12:03:59 2018 +0100
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2018, Red Hat Inc. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package jdk.tools.jaotc.aarch64;
+
+import jdk.tools.jaotc.InstructionDecoder;
+
+import jdk.vm.ci.code.TargetDescription;
+
+public final class AArch64InstructionDecoder extends InstructionDecoder {
+
+ private int currentEndOfInstruction;
+
+ public AArch64InstructionDecoder(TargetDescription target) {
+ }
+
+ @Override
+ public int currentEndOfInstruction() {
+ return currentEndOfInstruction;
+ }
+
+ @Override
+ public void decodePosition(final byte[] code, int pcOffset) {
+ currentEndOfInstruction = pcOffset + 4;
+ }
+}
--- a/src/jdk.internal.vm.compiler/share/classes/module-info.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.internal.vm.compiler/share/classes/module-info.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -44,6 +44,7 @@
exports org.graalvm.compiler.api.runtime to jdk.aot;
exports org.graalvm.compiler.api.replacements to jdk.aot;
exports org.graalvm.compiler.asm.amd64 to jdk.aot;
+ exports org.graalvm.compiler.asm.aarch64 to jdk.aot;
exports org.graalvm.compiler.bytecode to jdk.aot;
exports org.graalvm.compiler.code to jdk.aot;
exports org.graalvm.compiler.core to jdk.aot;
--- a/src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.asm.aarch64/src/org/graalvm/compiler/asm/aarch64/AArch64Assembler.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.asm.aarch64/src/org/graalvm/compiler/asm/aarch64/AArch64Assembler.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,6 @@
/*
- * Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2013, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2018, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -26,6 +27,7 @@
import static org.graalvm.compiler.asm.aarch64.AArch64Assembler.Instruction.ADD;
import static org.graalvm.compiler.asm.aarch64.AArch64Assembler.Instruction.ADDS;
import static org.graalvm.compiler.asm.aarch64.AArch64Assembler.Instruction.ADR;
+import static org.graalvm.compiler.asm.aarch64.AArch64Assembler.Instruction.ADRP;
import static org.graalvm.compiler.asm.aarch64.AArch64Assembler.Instruction.AND;
import static org.graalvm.compiler.asm.aarch64.AArch64Assembler.Instruction.ANDS;
import static org.graalvm.compiler.asm.aarch64.AArch64Assembler.Instruction.ASRV;
@@ -1347,15 +1349,13 @@
/**
* Address of page: sign extends 21-bit offset, shifts if left by 12 and adds it to the value of
* the PC with its bottom 12-bits cleared, writing the result to dst.
+ * No offset is emitted; the instruction will be patched later.
*
* @param dst general purpose register. May not be null, zero-register or stackpointer.
- * @param imm Signed 33-bit offset with lower 12bits clear.
*/
- // protected void adrp(Register dst, long imm) {
- // assert (imm & NumUtil.getNbitNumberInt(12)) == 0 : "Lower 12-bit of immediate must be zero.";
- // assert NumUtil.isSignedNbit(33, imm);
- // addressCalculationInstruction(dst, (int) (imm >>> 12), Instruction.ADRP);
- // }
+ public void adrp(Register dst) {
+ emitInt(ADRP.encoding | PcRelImmOp | rd(dst) );
+ }
/**
* Adds a 21-bit signed offset to the program counter and writes the result to dst.
@@ -1371,6 +1371,10 @@
emitInt(ADR.encoding | PcRelImmOp | rd(dst) | getPcRelativeImmEncoding(imm21), pos);
}
+ public void adrp(Register dst, int pageOffset) {
+ emitInt(ADRP.encoding | PcRelImmOp | rd(dst) | getPcRelativeImmEncoding(pageOffset));
+ }
+
private static int getPcRelativeImmEncoding(int imm21) {
assert NumUtil.isSignedNbit(21, imm21);
int imm = imm21 & NumUtil.getNbitNumberInt(21);
--- a/src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.asm.aarch64/src/org/graalvm/compiler/asm/aarch64/AArch64MacroAssembler.java Mon May 14 15:43:00 2018 +0200
+++ b/src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.asm.aarch64/src/org/graalvm/compiler/asm/aarch64/AArch64MacroAssembler.java Mon May 14 12:03:59 2018 +0100
@@ -1,5 +1,6 @@
/*
* Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2018, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -1564,6 +1565,12 @@
return AArch64Address.PLACEHOLDER;
}
+ public void addressOf(Register dst) {
+ // This will be fixed up later.
+ super.adrp(dst);
+ super.add(64, dst, dst, 0);
+ }
+
/**
* Loads an address into Register d.
*