8087333: Optionally Pre-Generate the HotSpot Template Interpreter
Summary: Optional support for pregenerated template interpreter
Reviewed-by: coleenp, dholmes, kvn
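
The mechanism is deliberately low-cost: CodeCacheExtensions (new headers below) is an all-static class whose default hooks are empty inline functions, so the new call sites sprinkled through the runtime compile away unless a port supplies a real codeCacheExtensions_ext.hpp. A minimal standalone sketch of that pattern, using illustrative names that are not HotSpot's:

#include <cstdio>

// Plays the role of CodeCacheExtensions: all-static, default hooks are empty
// inline functions, so callers pay nothing unless the header is replaced by a
// port that actually saves/loads pregenerated code.
class ExtensionHooks {
 public:
  static bool  skip_code_generation() { return false; }
  static void* handle_generated_blob(void* blob, const char* /*name*/) { return blob; }
};

static void* new_runtime_stub(const char* name) {
  void* blob = nullptr;
  if (!ExtensionHooks::skip_code_generation()) {   // bypass useless generation
    blob = ::operator new(64);                     // stand-in for emitting code
  }
  // an extension may record the blob for saving, or substitute a pregenerated one
  return ExtensionHooks::handle_generated_blob(blob, name);
}

int main() {
  void* blob = new_runtime_stub("demo");
  std::printf("stub 'demo' at %p\n", blob);
  ::operator delete(blob);
  return 0;
}
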
--- a/hotspot/agent/src/share/classes/sun/jvm/hotspot/code/CodeCache.java Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/agent/src/share/classes/sun/jvm/hotspot/code/CodeCache.java Wed Jul 01 10:53:26 2015 +0200
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2000, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2000, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -48,8 +48,13 @@
Type type = db.lookupType("CodeCache");
// Get array of CodeHeaps
+ // Note: CodeHeap may be subclassed with optional private heap mechanisms.
+ Type codeHeapType = db.lookupType("CodeHeap");
+ VirtualBaseConstructor heapConstructor =
+ new VirtualBaseConstructor(db, codeHeapType, "sun.jvm.hotspot.memory", CodeHeap.class);
+
AddressField heapsField = type.getAddressField("_heaps");
- heapArray = GrowableArray.create(heapsField.getValue(), new StaticBaseConstructor<CodeHeap>(CodeHeap.class));
+ heapArray = GrowableArray.create(heapsField.getValue(), heapConstructor);
scavengeRootNMethodsField = type.getAddressField("_scavenge_root_nmethods");
@@ -180,31 +185,9 @@
public void iterate(CodeCacheVisitor visitor) {
visitor.prologue(lowBound(), highBound());
- CodeBlob lastBlob = null;
-
for (int i = 0; i < heapArray.length(); ++i) {
CodeHeap current_heap = heapArray.at(i);
- Address ptr = current_heap.begin();
- while (ptr != null && ptr.lessThan(current_heap.end())) {
- try {
- // Use findStart to get a pointer inside blob other findBlob asserts
- CodeBlob blob = findBlobUnsafe(current_heap.findStart(ptr));
- if (blob != null) {
- visitor.visit(blob);
- if (blob == lastBlob) {
- throw new InternalError("saw same blob twice");
- }
- lastBlob = blob;
- }
- } catch (RuntimeException e) {
- e.printStackTrace();
- }
- Address next = current_heap.nextBlock(ptr);
- if (next != null && next.lessThan(ptr)) {
- throw new InternalError("pointer moved backwards");
- }
- ptr = next;
- }
+ current_heap.iterate(visitor, this);
}
visitor.epilogue();
}
--- a/hotspot/agent/src/share/classes/sun/jvm/hotspot/memory/CodeHeap.java Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/agent/src/share/classes/sun/jvm/hotspot/memory/CodeHeap.java Wed Jul 01 10:53:26 2015 +0200
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2000, 2004, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2000, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -25,6 +25,7 @@
package sun.jvm.hotspot.memory;
import java.util.*;
+import sun.jvm.hotspot.code.*;
import sun.jvm.hotspot.debugger.*;
import sun.jvm.hotspot.runtime.*;
import sun.jvm.hotspot.types.*;
@@ -90,7 +91,7 @@
return h.getAllocatedSpace();
}
- public Address nextBlock(Address ptr) {
+ private Address nextBlock(Address ptr) {
Address base = blockBase(ptr);
if (base == null) {
return null;
@@ -99,6 +100,31 @@
return base.addOffsetTo(block.getLength() * (1 << getLog2SegmentSize()));
}
+ public void iterate(CodeCacheVisitor visitor, CodeCache cache) {
+ CodeBlob lastBlob = null;
+ Address ptr = begin();
+ while (ptr != null && ptr.lessThan(end())) {
+ try {
+ // Use findStart to get a pointer inside blob; otherwise findBlob asserts
+ CodeBlob blob = cache.createCodeBlobWrapper(findStart(ptr));
+ if (blob != null) {
+ visitor.visit(blob);
+ if (blob == lastBlob) {
+ throw new InternalError("saw same blob twice");
+ }
+ lastBlob = blob;
+ }
+ } catch (RuntimeException e) {
+ e.printStackTrace();
+ }
+ Address next = nextBlock(ptr);
+ if (next != null && next.lessThan(ptr)) {
+ throw new InternalError("pointer moved backwards");
+ }
+ ptr = next;
+ }
+ }
+
//--------------------------------------------------------------------------------
// Internals only below this point
//
--- a/hotspot/make/linux/makefiles/buildtree.make Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/make/linux/makefiles/buildtree.make Wed Jul 01 10:53:26 2015 +0200
@@ -118,7 +118,8 @@
$(PLATFORM_DIR)/generated/dependencies \
$(PLATFORM_DIR)/generated/adfiles \
$(PLATFORM_DIR)/generated/jvmtifiles \
- $(PLATFORM_DIR)/generated/tracefiles
+ $(PLATFORM_DIR)/generated/tracefiles \
+ $(PLATFORM_DIR)/generated/extensions
TARGETS = debug fastdebug optimized product
SUBMAKE_DIRS = $(addprefix $(PLATFORM_DIR)/,$(TARGETS))
--- a/hotspot/make/linux/makefiles/rules.make Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/make/linux/makefiles/rules.make Wed Jul 01 10:53:26 2015 +0200
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2003, 2014, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2003, 2015, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
@@ -34,7 +34,7 @@
CC_COMPILE = $(CC) $(CXXFLAGS) $(CFLAGS)
CXX_COMPILE = $(CXX) $(CXXFLAGS) $(CFLAGS)
-AS.S = $(AS) $(ASFLAGS)
+AS.S = $(AS) $(ASFLAGS)
COMPILE.CC = $(CC_COMPILE) -c
GENASM.CC = $(CC_COMPILE) -S
@@ -170,6 +170,12 @@
$(QUIETLY) $(REMOVE_TARGET)
$(QUIETLY) $(AS.S) $(DEPFLAGS) -o $@ $< $(COMPILE_DONE)
+# gcc applies preprocessing if the file extension is .S instead of .s
+%.o: %.S
+ @echo $(LOG_INFO) Preprocessing and assembling $<
+ $(QUIETLY) $(REMOVE_TARGET)
+ $(QUIETLY) $(AS.S) $(DEPFLAGS) -o $@ $< $(COMPILE_DONE)
+
%.s: %.cpp
@echo $(LOG_INFO) Generating assembly for $<
$(QUIETLY) $(GENASM.CXX) -o $@ $<
--- a/hotspot/make/linux/makefiles/vm.make Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/make/linux/makefiles/vm.make Wed Jul 01 10:53:26 2015 +0200
@@ -54,7 +54,7 @@
# Src_Dirs_V is everything in src/share/vm/*, plus the right os/*/vm and cpu/*/vm
# The adfiles directory contains ad_<arch>.[ch]pp.
# The jvmtifiles directory contains jvmti*.[ch]pp
-Src_Dirs_V += $(GENERATED)/adfiles $(GENERATED)/jvmtifiles $(GENERATED)/tracefiles
+Src_Dirs_V += $(GENERATED)/adfiles $(GENERATED)/jvmtifiles $(GENERATED)/tracefiles $(GENERATED)/extensions
VPATH += $(Src_Dirs_V:%=%:)
# set INCLUDES for C preprocessor.
@@ -161,6 +161,8 @@
fi)
endif
+CORE_PATHS+=$(GENERATED)/extensions
+
COMPILER1_PATHS := $(call altsrc,$(HS_COMMON_SRC)/share/vm/c1)
COMPILER1_PATHS += $(HS_COMMON_SRC)/share/vm/c1
@@ -207,6 +209,8 @@
Src_Files_EXCLUDE += \*x86_32\*
endif
+Src_Files_BASE += \*.c \*.cpp \*.s
+
# Alternate vm.make
# This has to be included here to allow changes to the source
# directories and excluded files before they are expanded
@@ -216,13 +220,13 @@
# Locate all source files in the given directory, excluding files in Src_Files_EXCLUDE.
define findsrc
$(notdir $(shell find $(1)/. ! -name . -prune \
- -a \( -name \*.c -o -name \*.cpp -o -name \*.s \) \
+ -a \( -name DUMMY $(addprefix -o -name ,$(Src_Files_BASE)) \) \
-a ! \( -name DUMMY $(addprefix -o -name ,$(Src_Files_EXCLUDE)) \)))
endef
Src_Files := $(foreach e,$(Src_Dirs),$(call findsrc,$(e)))
-Obj_Files = $(sort $(addsuffix .o,$(basename $(Src_Files))))
+Obj_Files = $(sort $(addsuffix .o,$(basename $(Src_Files))) $(EXTENDED_JVM_OBJ_FILES))
JVM_OBJ_FILES = $(Obj_Files)
@@ -244,10 +248,16 @@
VMDEF_PAT := ^gHotSpotVM|$(VMDEF_PAT)
VMDEF_PAT := ^UseSharedSpaces$$|$(VMDEF_PAT)
VMDEF_PAT := ^_ZN9Arguments17SharedArchivePathE$$|$(VMDEF_PAT)
+ifneq ($(VMDEF_PAT_EXT),)
+ VMDEF_PAT := $(VMDEF_PAT_EXT)|$(VMDEF_PAT)
+endif
-vm.def: $(Res_Files) $(Obj_Files)
+vm.def: $(Res_Files) $(Obj_Files) $(VM_DEF_EXT)
$(QUIETLY) $(NM) --defined-only $(Obj_Files) | sort -k3 -u | \
awk '$$3 ~ /$(VMDEF_PAT)/ { print "\t" $$3 ";" }' > $@
+ifneq ($(VM_DEF_EXT),)
+ cat $(VM_DEF_EXT) >> $@
+endif
mapfile_ext:
rm -f $@
--- a/hotspot/src/share/vm/asm/codeBuffer.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/asm/codeBuffer.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -504,6 +504,7 @@
// Properties
const char* name() const { return _name; }
+ void set_name(const char* name) { _name = name; }
CodeBuffer* before_expand() const { return _before_expand; }
BufferBlob* blob() const { return _blob; }
void set_blob(BufferBlob* blob);
--- a/hotspot/src/share/vm/c1/c1_Runtime1.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/c1/c1_Runtime1.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -33,6 +33,7 @@
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeBlob.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "code/compiledIC.hpp"
#include "code/pcDesc.hpp"
#include "code/scopeDesc.hpp"
@@ -183,20 +184,25 @@
// create code buffer for code storage
CodeBuffer code(buffer_blob);
- Compilation::setup_code_buffer(&code, 0);
+ OopMapSet* oop_maps;
+ int frame_size;
+ bool must_gc_arguments;
- // create assembler for code generation
- StubAssembler* sasm = new StubAssembler(&code, name_for(id), id);
- // generate code for runtime stub
- OopMapSet* oop_maps;
- oop_maps = generate_code_for(id, sasm);
- assert(oop_maps == NULL || sasm->frame_size() != no_frame_size,
- "if stub has an oop map it must have a valid frame size");
+ if (!CodeCacheExtensions::skip_compiler_support()) {
+ // bypass useless code generation
+ Compilation::setup_code_buffer(&code, 0);
+
+ // create assembler for code generation
+ StubAssembler* sasm = new StubAssembler(&code, name_for(id), id);
+ // generate code for runtime stub
+ oop_maps = generate_code_for(id, sasm);
+ assert(oop_maps == NULL || sasm->frame_size() != no_frame_size,
+ "if stub has an oop map it must have a valid frame size");
#ifdef ASSERT
- // Make sure that stubs that need oopmaps have them
- switch (id) {
- // These stubs don't need to have an oopmap
+ // Make sure that stubs that need oopmaps have them
+ switch (id) {
+ // These stubs don't need to have an oopmap
case dtrace_object_alloc_id:
case g1_pre_barrier_slow_id:
case g1_post_barrier_slow_id:
@@ -209,23 +215,32 @@
#endif
break;
- // All other stubs should have oopmaps
+ // All other stubs should have oopmaps
default:
assert(oop_maps != NULL, "must have an oopmap");
- }
+ }
#endif
- // align so printing shows nop's instead of random code at the end (SimpleStubs are aligned)
- sasm->align(BytesPerWord);
- // make sure all code is in code buffer
- sasm->flush();
+ // align so printing shows nop's instead of random code at the end (SimpleStubs are aligned)
+ sasm->align(BytesPerWord);
+ // make sure all code is in code buffer
+ sasm->flush();
+
+ frame_size = sasm->frame_size();
+ must_gc_arguments = sasm->must_gc_arguments();
+ } else {
+ /* ignored values */
+ oop_maps = NULL;
+ frame_size = 0;
+ must_gc_arguments = false;
+ }
// create blob - distinguish a few special cases
CodeBlob* blob = RuntimeStub::new_runtime_stub(name_for(id),
&code,
CodeOffsets::frame_never_safe,
- sasm->frame_size(),
+ frame_size,
oop_maps,
- sasm->must_gc_arguments());
+ must_gc_arguments);
// install blob
assert(blob != NULL, "blob must exist");
_blobs[id] = blob;
@@ -399,7 +414,7 @@
CompLevel level = (CompLevel)nm->comp_level();
int bci = InvocationEntryBci;
if (branch_bci != InvocationEntryBci) {
- // Compute desination bci
+ // Compute destination bci
address pc = method()->code_base() + branch_bci;
Bytecodes::Code branch = Bytecodes::code_at(method(), pc);
int offset = 0;
--- a/hotspot/src/share/vm/code/codeBlob.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/code/codeBlob.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -25,6 +25,7 @@
#include "precompiled.hpp"
#include "code/codeBlob.hpp"
#include "code/codeCache.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "code/relocInfo.hpp"
#include "compiler/disassembler.hpp"
#include "interpreter/bytecode.hpp"
@@ -194,6 +195,7 @@
BufferBlob* blob = NULL;
unsigned int size = sizeof(BufferBlob);
+ CodeCacheExtensions::size_blob(name, &buffer_size);
// align the size to CodeEntryAlignment
size = align_code_offset(size);
size += round_to(buffer_size, oopSize);
@@ -277,6 +279,7 @@
MethodHandlesAdapterBlob* blob = NULL;
unsigned int size = sizeof(MethodHandlesAdapterBlob);
+ CodeCacheExtensions::size_blob("MethodHandles adapters", &buffer_size);
// align the size to CodeEntryAlignment
size = align_code_offset(size);
size += round_to(buffer_size, oopSize);
@@ -317,11 +320,13 @@
{
RuntimeStub* stub = NULL;
ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
- {
+ if (!CodeCacheExtensions::skip_code_generation()) {
+ // bypass useless code generation
MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
unsigned int size = allocation_size(cb, sizeof(RuntimeStub));
stub = new (size) RuntimeStub(stub_name, cb, size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
}
+ stub = (RuntimeStub*) CodeCacheExtensions::handle_generated_blob(stub, stub_name);
trace_new_stub(stub, "RuntimeStub - ", stub_name);
--- a/hotspot/src/share/vm/code/codeBlob.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/code/codeBlob.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1998, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -38,7 +38,8 @@
MethodProfiled = 1, // Execution level 2 and 3 (profiled) nmethods
NonNMethod = 2, // Non-nmethods like Buffers, Adapters and Runtime Stubs
All = 3, // All types (No code cache segmentation)
- NumTypes = 4 // Number of CodeBlobTypes
+ Pregenerated = 4, // Special blobs, managed by CodeCacheExtensions
+ NumTypes = 5 // Number of CodeBlobTypes
};
};
@@ -63,6 +64,7 @@
class CodeBlob VALUE_OBJ_CLASS_SPEC {
friend class VMStructs;
+ friend class CodeCacheDumper;
private:
const char* _name;
@@ -206,6 +208,14 @@
void set_strings(CodeStrings& strings) {
_strings.assign(strings);
}
+
+ static ByteSize name_field_offset() {
+ return byte_offset_of(CodeBlob, _name);
+ }
+
+ static ByteSize oop_maps_field_offset() {
+ return byte_offset_of(CodeBlob, _oop_maps);
+ }
};
class WhiteBox;
--- a/hotspot/src/share/vm/code/codeCache.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/code/codeCache.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -409,7 +409,7 @@
}
if (PrintCodeCacheExtension) {
ResourceMark rm;
- if (SegmentedCodeCache) {
+ if (_heaps->length() >= 1) {
tty->print("%s", heap->name());
} else {
tty->print("CodeCache");
@@ -1211,7 +1211,7 @@
int i = 0;
FOR_ALL_HEAPS(heap) {
- if (SegmentedCodeCache && Verbose) {
+ if ((_heaps->length() >= 1) && Verbose) {
tty->print_cr("-- %s --", (*heap)->name());
}
FOR_ALL_BLOBS(cb, *heap) {
@@ -1360,7 +1360,7 @@
FOR_ALL_HEAPS(heap_iterator) {
CodeHeap* heap = (*heap_iterator);
size_t total = (heap->high_boundary() - heap->low_boundary());
- if (SegmentedCodeCache) {
+ if (_heaps->length() >= 1) {
st->print("%s:", heap->name());
} else {
st->print("CodeCache:");
--- a/hotspot/src/share/vm/code/codeCache.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/code/codeCache.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -78,6 +78,7 @@
friend class VMStructs;
friend class NMethodIterator;
friend class WhiteBox;
+ friend class CodeCacheLoader;
private:
// CodeHeaps of the cache
static GrowableArray<CodeHeap*>* _heaps;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/code/codeCacheExtensions.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_VM_CODE_CODE_CACHE_EXTENSIONS_HPP
+#define SHARE_VM_CODE_CODE_CACHE_EXTENSIONS_HPP
+
+#include "memory/allocation.hpp"
+
+class CodeCacheExtensionsSteps: AllStatic {
+public:
+ enum Step {
+ // Support for optional fine-grained initialization hooks
+ // Note: these hooks must support refining the granularity
+ // (e.g. adding intermediate steps in the ordered enum
+ // if needed for future features)
+ Start,
+ VMVersion,
+ StubRoutines1,
+ Universe,
+ TemplateInterpreter,
+ Interpreter,
+ StubRoutines2,
+ InitGlobals,
+ CreateVM,
+ LastStep
+ };
+};
+
+#include "code/codeCacheExtensions_ext.hpp"
+
+#endif // SHARE_VM_CODE_CODE_CACHE_EXTENSIONS_HPP
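
The enum only fixes an ordering; what an extension does at each step is left open (the default hooks in the _ext header below are no-ops). A hedged, standalone sketch of how a complete_step hook could react to just the steps it cares about, assuming a local copy of the step names purely for illustration:

#include <cstdio>
#include <initializer_list>

// Illustrative copy of the ordered steps, for a standalone example only.
enum class Step { Start, VMVersion, StubRoutines1, Universe, TemplateInterpreter,
                  Interpreter, StubRoutines2, InitGlobals, CreateVM, LastStep };

// Hypothetical extension: reacts only to the steps it needs, in order.
static void complete_step(Step step) {
  switch (step) {
    case Step::TemplateInterpreter:
      std::puts("e.g. record the generated interpreter, or swap in a pregenerated one");
      break;
    default:
      break;  // most steps need no action
  }
}

int main() {
  for (Step s : { Step::VMVersion, Step::StubRoutines1, Step::Universe,
                  Step::TemplateInterpreter, Step::Interpreter,
                  Step::StubRoutines2, Step::InitGlobals }) {
    complete_step(s);
  }
}
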
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/code/codeCacheExtensions_ext.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_VM_CODE_CODE_CACHE_EXTENSIONS_EXT_HPP
+#define SHARE_VM_CODE_CODE_CACHE_EXTENSIONS_EXT_HPP
+
+#include "utilities/macros.hpp"
+#include "memory/allocation.hpp"
+#include "utilities/globalDefinitions.hpp"
+#include "interpreter/bytecodes.hpp"
+
+class AdapterHandlerEntry;
+class CodeBlob;
+class CodeBuffer;
+class InterpreterMacroAssembler;
+class Template;
+
+// All the methods defined here are placeholders for possible extensions.
+
+class CodeCacheExtensions: AllStatic {
+ friend class CodeCacheDumper;
+
+public:
+ // Initialize both code saving and loading.
+ // Must be called very early, before any code is generated.
+ static void initialize() {}
+
+ // Check whether the generated interpreter will be saved.
+ static bool saving_generated_interpreter() { return false; }
+
+ // Check whether a pregenerated interpreter is used.
+ static bool use_pregenerated_interpreter() { return false; }
+
+ // Placeholder for additional VM initialization code
+ static void complete_step(CodeCacheExtensionsSteps::Step phase) {}
+
+ // Return false for newly generated code, on systems where it is not
+ // executable.
+ static bool is_executable(void *pc) { return true; }
+
+ // Return whether dynamically generated code can be executable
+ static bool support_dynamic_code() { return true; }
+
+ // Skip new code generation when known to be useless.
+ static bool skip_code_generation() { return false; }
+
+ // Skip stubs used only for compiled code support.
+ static bool skip_compiler_support() { return false; }
+
+ // UseFastSignatureHandlers is ignored when this returns false
+ static bool support_fast_signature_handlers() { return true; }
+
+ /////////////////////////
+ // Handle generated code:
+ // - allow newly generated code to be shared
+ // - allow pregenerated code to be used in place of the newly generated one
+ // (modifying pc).
+ // - support remapping when doing both save and load
+ // 'remap' can be set to false if the addresses handled are not referenced
+ // from code generated later.
+
+ // Associate a name to a generated codelet and possibly modify the pc
+ // Note: use the specialized versions instead when they exist:
+ // - handle_generated_blob for CodeBlob
+ // - handle_generated_handler for SignatureHandlers
+ // See also the optimized calls below that handle several PCs at once.
+ static void handle_generated_pc(address &pc, const char *name) {}
+
+ // Adds a safe definition of the codelet, for codelets used right after
+ // generation (else we would need to immediately stop the JVM and convert
+ // the generated code to executable format before being able to go further).
+ static void handle_generated_pc(address &pc, const char *name, address default_entry) {}
+
+ // Special cases
+
+ // Special case for CodeBlobs, which may require blob specific actions.
+ static CodeBlob* handle_generated_blob(CodeBlob* blob, const char *name = NULL) { return blob; }
+
+ // Special case for Signature Handlers.
+ static void handle_generated_handler(address &handler_start, const char *name, address handler_end) {}
+
+ // Support for generating different variants of the interpreter
+ // that can be dynamically selected after reload.
+ //
+ // - init_interpreter_assembler allows configuring the assembler for
+ // the current variant
+ //
+ // - needs_other_interpreter_variant returns true as long as other
+ // variants are needed.
+ //
+ // - skip_template_interpreter_entries returns true if new entries
+ // need not be generated for this masm setup and this bytecode
+ //
+ // - completed_template_interpreter_entries is called after new
+ // entries have been generated and installed, for any non skipped
+ // bytecode.
+ static void init_interpreter_assembler(InterpreterMacroAssembler* masm, CodeBuffer* code) {}
+ static bool needs_other_interpreter_variant() { return false; }
+ static bool skip_template_interpreter_entries(Bytecodes::Code code) { return false; }
+ static void completed_template_interpreter_entries(InterpreterMacroAssembler* masm, Bytecodes::Code code) {}
+
+ // Code size optimization. May adjust the requested size.
+ static void size_blob(const char* name, int *updatable_size) {}
+
+ // ergonomics
+ static void set_ergonomics_flags() {}
+};
+
+#endif // SHARE_VM_CODE_CODE_CACHE_EXTENSIONS_EXT_HPP
--- a/hotspot/src/share/vm/code/stubs.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/code/stubs.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -261,3 +261,17 @@
stub_print(s);
}
}
+
+// Fixup for pregenerated code
+void StubQueue::fix_buffer(address buffer, address queue_end, address buffer_end, int number_of_stubs) {
+ const int extra_bytes = CodeEntryAlignment;
+ _stub_buffer = buffer;
+ _queue_begin = 0;
+ _queue_end = queue_end - buffer;
+ _number_of_stubs = number_of_stubs;
+ int size = buffer_end - buffer;
+ // Note: _buffer_limit must differ from _queue_end in the iteration loops
+ // => add extra space at the end (preserving alignment for asserts) if needed
+ if (buffer_end == queue_end) size += extra_bytes;
+ _buffer_limit = _buffer_size = size;
+}
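
fix_buffer() rebinds an existing StubQueue onto a pre-mapped buffer; the only subtlety is the padding added when the stubs exactly fill the buffer, so that _buffer_limit never equals _queue_end in the iteration loops. A small standalone check of that arithmetic, with illustrative sizes (in the real code the extra padding bytes must actually exist in the mapping):

#include <cassert>
#include <cstdio>

// Stand-ins for the StubQueue fields touched by fix_buffer().
struct QueueFixup {
  int queue_begin, queue_end, buffer_limit, buffer_size, number_of_stubs;

  void fix_buffer(char* buffer, char* queue_end_p, char* buffer_end_p, int stubs,
                  int code_entry_alignment) {
    queue_begin     = 0;
    queue_end       = static_cast<int>(queue_end_p - buffer);
    number_of_stubs = stubs;
    int size        = static_cast<int>(buffer_end_p - buffer);
    // _buffer_limit must differ from _queue_end, so pad when the stubs
    // exactly fill the buffer.
    if (buffer_end_p == queue_end_p) size += code_entry_alignment;
    buffer_limit = buffer_size = size;
  }
};

int main() {
  char buffer[4096];
  QueueFixup q{};
  // Case 1: stubs end before the buffer does - no padding needed.
  q.fix_buffer(buffer, buffer + 3000, buffer + 4096, 42, 32);
  assert(q.buffer_limit == 4096 && q.queue_end == 3000);
  // Case 2: stubs fill the buffer exactly - padding keeps limit != end.
  q.fix_buffer(buffer, buffer + 4096, buffer + 4096, 42, 32);
  assert(q.buffer_limit == 4096 + 32 && q.buffer_limit != q.queue_end);
  std::puts("fix_buffer arithmetic holds");
}
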
--- a/hotspot/src/share/vm/code/stubs.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/code/stubs.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -216,6 +216,9 @@
// Debugging/printing
void verify(); // verifies the stub queue
void print(); // prints information about the stub queue
+
+ // Fixup for pregenerated code
+ void fix_buffer(address buffer, address queue_end, address buffer_end, int number_of_stubs);
};
#endif // SHARE_VM_CODE_STUBS_HPP
--- a/hotspot/src/share/vm/interpreter/interpreter.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/interpreter/interpreter.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -134,8 +134,10 @@
tty->print_cr("wasted space = %6dK bytes", (int)_code->available_space()/1024);
tty->cr();
tty->print_cr("# of codelets = %6d" , _code->number_of_stubs());
- tty->print_cr("avg codelet size = %6d bytes", _code->used_space() / _code->number_of_stubs());
- tty->cr();
+ if (_code->number_of_stubs() != 0) {
+ tty->print_cr("avg codelet size = %6d bytes", _code->used_space() / _code->number_of_stubs());
+ tty->cr();
+ }
_code->print();
tty->print_cr("----------------------------------------------------------------------");
tty->cr();
--- a/hotspot/src/share/vm/interpreter/interpreter.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/interpreter/interpreter.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -45,6 +45,7 @@
class InterpreterCodelet: public Stub {
friend class VMStructs;
+ friend class CodeCacheDumper; // possible extension [do not remove]
private:
int _size; // the size in bytes
const char* _description; // a description of the codelet, for debugging & printing
--- a/hotspot/src/share/vm/interpreter/interpreterRuntime.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/interpreter/interpreterRuntime.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -27,6 +27,7 @@
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/disassembler.hpp"
#include "gc/shared/collectedHeap.hpp"
@@ -1178,6 +1179,7 @@
ICache::invalidate_range(handler, insts_size);
_handler = handler + insts_size;
}
+ CodeCacheExtensions::handle_generated_handler(handler, buffer->name(), _handler);
return handler;
}
@@ -1186,7 +1188,7 @@
// use slow signature handler if we can't do better
int handler_index = -1;
// check if we can use customized (fast) signature handler
- if (UseFastSignatureHandlers && method->size_of_parameters() <= Fingerprinter::max_size_of_parameters) {
+ if (UseFastSignatureHandlers && CodeCacheExtensions::support_fast_signature_handlers() && method->size_of_parameters() <= Fingerprinter::max_size_of_parameters) {
// use customized signature handler
MutexLocker mu(SignatureHandlerLibrary_lock);
// make sure data structure is initialized
@@ -1203,14 +1205,23 @@
round_to((intptr_t)_buffer, CodeEntryAlignment) - (address)_buffer;
CodeBuffer buffer((address)(_buffer + align_offset),
SignatureHandlerLibrary::buffer_size - align_offset);
+ if (!CodeCacheExtensions::support_dynamic_code()) {
+ // we need a name for the signature (for lookups or saving)
+ const int SYMBOL_SIZE = 50;
+ char *symbolName = NEW_RESOURCE_ARRAY(char, SYMBOL_SIZE);
+ // support for named signatures
+ jio_snprintf(symbolName, SYMBOL_SIZE,
+ "native_" UINT64_FORMAT, fingerprint);
+ buffer.set_name(symbolName);
+ }
InterpreterRuntime::SignatureHandlerGenerator(method, &buffer).generate(fingerprint);
// copy into code heap
address handler = set_handler(&buffer);
if (handler == NULL) {
- // use slow signature handler
+ // use slow signature handler (without memorizing it in the fingerprints)
} else {
// debugging suppport
- if (PrintSignatureHandlers) {
+ if (PrintSignatureHandlers && (handler != Interpreter::slow_signature_handler())) {
tty->cr();
tty->print_cr("argument handler #%d for: %s %s (fingerprint = " UINT64_FORMAT ", %d bytes generated)",
_handlers->length(),
@@ -1218,7 +1229,10 @@
method->name_and_sig_as_C_string(),
fingerprint,
buffer.insts_size());
- Disassembler::decode(handler, handler + buffer.insts_size());
+ if (buffer.insts_size() > 0) {
+ // buffer may be empty for pregenerated handlers
+ Disassembler::decode(handler, handler + buffer.insts_size());
+ }
#ifndef PRODUCT
address rh_begin = Interpreter::result_handler(method()->result_type());
if (CodeCache::contains(rh_begin)) {
@@ -1277,6 +1291,37 @@
#endif // ASSERT
}
+void SignatureHandlerLibrary::add(uint64_t fingerprint, address handler) {
+ int handler_index = -1;
+ // use customized signature handler
+ MutexLocker mu(SignatureHandlerLibrary_lock);
+ // make sure data structure is initialized
+ initialize();
+ fingerprint = InterpreterRuntime::normalize_fast_native_fingerprint(fingerprint);
+ handler_index = _fingerprints->find(fingerprint);
+ // create handler if necessary
+ if (handler_index < 0) {
+ if (PrintSignatureHandlers && (handler != Interpreter::slow_signature_handler())) {
+ tty->cr();
+ tty->print_cr("argument handler #%d at "PTR_FORMAT" for fingerprint " UINT64_FORMAT,
+ _handlers->length(),
+ handler,
+ fingerprint);
+ }
+ _fingerprints->append(fingerprint);
+ _handlers->append(handler);
+ } else {
+ if (PrintSignatureHandlers) {
+ tty->cr();
+ tty->print_cr("duplicate argument handler #%d for fingerprint " UINT64_FORMAT "(old: "PTR_FORMAT", new : "PTR_FORMAT")",
+ _handlers->length(),
+ fingerprint,
+ _handlers->at(handler_index),
+ handler);
+ }
+ }
+}
+
BufferBlob* SignatureHandlerLibrary::_handler_blob = NULL;
address SignatureHandlerLibrary::_handler = NULL;
--- a/hotspot/src/share/vm/interpreter/interpreterRuntime.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/interpreter/interpreterRuntime.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -219,6 +219,7 @@
public:
static void add(methodHandle method);
+ static void add(uint64_t fingerprint, address handler);
};
#endif // SHARE_VM_INTERPRETER_INTERPRETERRUNTIME_HPP
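
The new add(fingerprint, handler) overload lets pregenerated signature handlers be registered directly, without re-running the generator: functionally it is a keyed append with a duplicate check over the fingerprint/handler tables. A minimal standalone analogue, assuming a plain map in place of the parallel GrowableArrays:

#include <cstdint>
#include <cstdio>
#include <map>

// Minimal analogue of registering pregenerated signature handlers:
// one handler address per normalized fingerprint, first registration wins.
using HandlerTable = std::map<uint64_t, const void*>;

static void add_handler(HandlerTable& table, uint64_t fingerprint, const void* handler) {
  auto it = table.find(fingerprint);
  if (it == table.end()) {
    table.emplace(fingerprint, handler);            // new fingerprint: record it
  } else {
    std::printf("duplicate handler for fingerprint %llu (kept %p, ignored %p)\n",
                (unsigned long long)fingerprint, it->second, handler);
  }
}

int main() {
  HandlerTable table;
  int stub_a = 0, stub_b = 0;                       // stand-ins for code addresses
  add_handler(table, 0x1234, &stub_a);
  add_handler(table, 0x1234, &stub_b);              // reports the duplicate
  std::printf("handlers registered: %zu\n", table.size());
}
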
--- a/hotspot/src/share/vm/interpreter/templateInterpreter.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/interpreter/templateInterpreter.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -23,6 +23,7 @@
*/
#include "precompiled.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterGenerator.hpp"
#include "interpreter/interpreterRuntime.hpp"
@@ -49,10 +50,33 @@
TraceTime timer("Interpreter generation", TraceStartupTime);
int code_size = InterpreterCodeSize;
NOT_PRODUCT(code_size *= 4;) // debug uses extra interpreter code space
+#if INCLUDE_JVMTI
+ if (CodeCacheExtensions::saving_generated_interpreter()) {
+ // May require several versions of the codelets.
+ // Final size will automatically be optimized.
+ code_size *= 2;
+ }
+#endif
_code = new StubQueue(new InterpreterCodeletInterface, code_size, NULL,
"Interpreter");
InterpreterGenerator g(_code);
- if (PrintInterpreter) print();
+ }
+ if (PrintInterpreter) {
+ if (CodeCacheExtensions::saving_generated_interpreter() &&
+ CodeCacheExtensions::use_pregenerated_interpreter()) {
+ ResourceMark rm;
+ tty->print("Printing the newly generated interpreter first");
+ print();
+ tty->print("Printing the pregenerated interpreter next");
+ }
+ }
+
+ // Install the pregenerated interpreter code before printing it
+ CodeCacheExtensions::complete_step(CodeCacheExtensionsSteps::TemplateInterpreter);
+
+ if (PrintInterpreter) {
+ ResourceMark rm;
+ print();
}
// initialize dispatch table
@@ -214,194 +238,203 @@
};
void TemplateInterpreterGenerator::generate_all() {
- AbstractInterpreterGenerator::generate_all();
+ // Loop, in case we need several variants of the interpreter entries
+ do {
+ if (!CodeCacheExtensions::skip_code_generation()) {
+ // bypass code generation when useless
+ AbstractInterpreterGenerator::generate_all();
- { CodeletMark cm(_masm, "error exits");
- _unimplemented_bytecode = generate_error_exit("unimplemented bytecode");
- _illegal_bytecode_sequence = generate_error_exit("illegal bytecode sequence - method not verified");
- }
+ { CodeletMark cm(_masm, "error exits");
+ _unimplemented_bytecode = generate_error_exit("unimplemented bytecode");
+ _illegal_bytecode_sequence = generate_error_exit("illegal bytecode sequence - method not verified");
+ }
#ifndef PRODUCT
- if (TraceBytecodes) {
- CodeletMark cm(_masm, "bytecode tracing support");
- Interpreter::_trace_code =
- EntryPoint(
- generate_trace_code(btos),
- generate_trace_code(ctos),
- generate_trace_code(stos),
- generate_trace_code(atos),
- generate_trace_code(itos),
- generate_trace_code(ltos),
- generate_trace_code(ftos),
- generate_trace_code(dtos),
- generate_trace_code(vtos)
- );
- }
+ if (TraceBytecodes) {
+ CodeletMark cm(_masm, "bytecode tracing support");
+ Interpreter::_trace_code =
+ EntryPoint(
+ generate_trace_code(btos),
+ generate_trace_code(ctos),
+ generate_trace_code(stos),
+ generate_trace_code(atos),
+ generate_trace_code(itos),
+ generate_trace_code(ltos),
+ generate_trace_code(ftos),
+ generate_trace_code(dtos),
+ generate_trace_code(vtos)
+ );
+ }
#endif // !PRODUCT
- { CodeletMark cm(_masm, "return entry points");
- const int index_size = sizeof(u2);
- for (int i = 0; i < Interpreter::number_of_return_entries; i++) {
- Interpreter::_return_entry[i] =
- EntryPoint(
- generate_return_entry_for(itos, i, index_size),
- generate_return_entry_for(itos, i, index_size),
- generate_return_entry_for(itos, i, index_size),
- generate_return_entry_for(atos, i, index_size),
- generate_return_entry_for(itos, i, index_size),
- generate_return_entry_for(ltos, i, index_size),
- generate_return_entry_for(ftos, i, index_size),
- generate_return_entry_for(dtos, i, index_size),
- generate_return_entry_for(vtos, i, index_size)
- );
- }
- }
+ { CodeletMark cm(_masm, "return entry points");
+ const int index_size = sizeof(u2);
+ for (int i = 0; i < Interpreter::number_of_return_entries; i++) {
+ Interpreter::_return_entry[i] =
+ EntryPoint(
+ generate_return_entry_for(itos, i, index_size),
+ generate_return_entry_for(itos, i, index_size),
+ generate_return_entry_for(itos, i, index_size),
+ generate_return_entry_for(atos, i, index_size),
+ generate_return_entry_for(itos, i, index_size),
+ generate_return_entry_for(ltos, i, index_size),
+ generate_return_entry_for(ftos, i, index_size),
+ generate_return_entry_for(dtos, i, index_size),
+ generate_return_entry_for(vtos, i, index_size)
+ );
+ }
+ }
- { CodeletMark cm(_masm, "invoke return entry points");
- const TosState states[] = {itos, itos, itos, itos, ltos, ftos, dtos, atos, vtos};
- const int invoke_length = Bytecodes::length_for(Bytecodes::_invokestatic);
- const int invokeinterface_length = Bytecodes::length_for(Bytecodes::_invokeinterface);
- const int invokedynamic_length = Bytecodes::length_for(Bytecodes::_invokedynamic);
+ { CodeletMark cm(_masm, "invoke return entry points");
+ const TosState states[] = {itos, itos, itos, itos, ltos, ftos, dtos, atos, vtos};
+ const int invoke_length = Bytecodes::length_for(Bytecodes::_invokestatic);
+ const int invokeinterface_length = Bytecodes::length_for(Bytecodes::_invokeinterface);
+ const int invokedynamic_length = Bytecodes::length_for(Bytecodes::_invokedynamic);
- for (int i = 0; i < Interpreter::number_of_return_addrs; i++) {
- TosState state = states[i];
- Interpreter::_invoke_return_entry[i] = generate_return_entry_for(state, invoke_length, sizeof(u2));
- Interpreter::_invokeinterface_return_entry[i] = generate_return_entry_for(state, invokeinterface_length, sizeof(u2));
- Interpreter::_invokedynamic_return_entry[i] = generate_return_entry_for(state, invokedynamic_length, sizeof(u4));
- }
- }
+ for (int i = 0; i < Interpreter::number_of_return_addrs; i++) {
+ TosState state = states[i];
+ Interpreter::_invoke_return_entry[i] = generate_return_entry_for(state, invoke_length, sizeof(u2));
+ Interpreter::_invokeinterface_return_entry[i] = generate_return_entry_for(state, invokeinterface_length, sizeof(u2));
+ Interpreter::_invokedynamic_return_entry[i] = generate_return_entry_for(state, invokedynamic_length, sizeof(u4));
+ }
+ }
- { CodeletMark cm(_masm, "earlyret entry points");
- Interpreter::_earlyret_entry =
- EntryPoint(
- generate_earlyret_entry_for(btos),
- generate_earlyret_entry_for(ctos),
- generate_earlyret_entry_for(stos),
- generate_earlyret_entry_for(atos),
- generate_earlyret_entry_for(itos),
- generate_earlyret_entry_for(ltos),
- generate_earlyret_entry_for(ftos),
- generate_earlyret_entry_for(dtos),
- generate_earlyret_entry_for(vtos)
- );
- }
+ { CodeletMark cm(_masm, "earlyret entry points");
+ Interpreter::_earlyret_entry =
+ EntryPoint(
+ generate_earlyret_entry_for(btos),
+ generate_earlyret_entry_for(ctos),
+ generate_earlyret_entry_for(stos),
+ generate_earlyret_entry_for(atos),
+ generate_earlyret_entry_for(itos),
+ generate_earlyret_entry_for(ltos),
+ generate_earlyret_entry_for(ftos),
+ generate_earlyret_entry_for(dtos),
+ generate_earlyret_entry_for(vtos)
+ );
+ }
- { CodeletMark cm(_masm, "deoptimization entry points");
- for (int i = 0; i < Interpreter::number_of_deopt_entries; i++) {
- Interpreter::_deopt_entry[i] =
- EntryPoint(
- generate_deopt_entry_for(itos, i),
- generate_deopt_entry_for(itos, i),
- generate_deopt_entry_for(itos, i),
- generate_deopt_entry_for(atos, i),
- generate_deopt_entry_for(itos, i),
- generate_deopt_entry_for(ltos, i),
- generate_deopt_entry_for(ftos, i),
- generate_deopt_entry_for(dtos, i),
- generate_deopt_entry_for(vtos, i)
- );
- }
- }
+ { CodeletMark cm(_masm, "deoptimization entry points");
+ for (int i = 0; i < Interpreter::number_of_deopt_entries; i++) {
+ Interpreter::_deopt_entry[i] =
+ EntryPoint(
+ generate_deopt_entry_for(itos, i),
+ generate_deopt_entry_for(itos, i),
+ generate_deopt_entry_for(itos, i),
+ generate_deopt_entry_for(atos, i),
+ generate_deopt_entry_for(itos, i),
+ generate_deopt_entry_for(ltos, i),
+ generate_deopt_entry_for(ftos, i),
+ generate_deopt_entry_for(dtos, i),
+ generate_deopt_entry_for(vtos, i)
+ );
+ }
+ }
- { CodeletMark cm(_masm, "result handlers for native calls");
- // The various result converter stublets.
- int is_generated[Interpreter::number_of_result_handlers];
- memset(is_generated, 0, sizeof(is_generated));
+ { CodeletMark cm(_masm, "result handlers for native calls");
+ // The various result converter stublets.
+ int is_generated[Interpreter::number_of_result_handlers];
+ memset(is_generated, 0, sizeof(is_generated));
- for (int i = 0; i < Interpreter::number_of_result_handlers; i++) {
- BasicType type = types[i];
- if (!is_generated[Interpreter::BasicType_as_index(type)]++) {
- Interpreter::_native_abi_to_tosca[Interpreter::BasicType_as_index(type)] = generate_result_handler_for(type);
+ for (int i = 0; i < Interpreter::number_of_result_handlers; i++) {
+ BasicType type = types[i];
+ if (!is_generated[Interpreter::BasicType_as_index(type)]++) {
+ Interpreter::_native_abi_to_tosca[Interpreter::BasicType_as_index(type)] = generate_result_handler_for(type);
+ }
+ }
}
- }
- }
- { CodeletMark cm(_masm, "continuation entry points");
- Interpreter::_continuation_entry =
- EntryPoint(
- generate_continuation_for(btos),
- generate_continuation_for(ctos),
- generate_continuation_for(stos),
- generate_continuation_for(atos),
- generate_continuation_for(itos),
- generate_continuation_for(ltos),
- generate_continuation_for(ftos),
- generate_continuation_for(dtos),
- generate_continuation_for(vtos)
- );
- }
+ { CodeletMark cm(_masm, "continuation entry points");
+ Interpreter::_continuation_entry =
+ EntryPoint(
+ generate_continuation_for(btos),
+ generate_continuation_for(ctos),
+ generate_continuation_for(stos),
+ generate_continuation_for(atos),
+ generate_continuation_for(itos),
+ generate_continuation_for(ltos),
+ generate_continuation_for(ftos),
+ generate_continuation_for(dtos),
+ generate_continuation_for(vtos)
+ );
+ }
- { CodeletMark cm(_masm, "safepoint entry points");
- Interpreter::_safept_entry =
- EntryPoint(
- generate_safept_entry_for(btos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
- generate_safept_entry_for(ctos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
- generate_safept_entry_for(stos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
- generate_safept_entry_for(atos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
- generate_safept_entry_for(itos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
- generate_safept_entry_for(ltos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
- generate_safept_entry_for(ftos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
- generate_safept_entry_for(dtos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
- generate_safept_entry_for(vtos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint))
- );
- }
+ { CodeletMark cm(_masm, "safepoint entry points");
+ Interpreter::_safept_entry =
+ EntryPoint(
+ generate_safept_entry_for(btos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
+ generate_safept_entry_for(ctos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
+ generate_safept_entry_for(stos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
+ generate_safept_entry_for(atos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
+ generate_safept_entry_for(itos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
+ generate_safept_entry_for(ltos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
+ generate_safept_entry_for(ftos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
+ generate_safept_entry_for(dtos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint)),
+ generate_safept_entry_for(vtos, CAST_FROM_FN_PTR(address, InterpreterRuntime::at_safepoint))
+ );
+ }
- { CodeletMark cm(_masm, "exception handling");
- // (Note: this is not safepoint safe because thread may return to compiled code)
- generate_throw_exception();
- }
+ { CodeletMark cm(_masm, "exception handling");
+ // (Note: this is not safepoint safe because thread may return to compiled code)
+ generate_throw_exception();
+ }
- { CodeletMark cm(_masm, "throw exception entrypoints");
- Interpreter::_throw_ArrayIndexOutOfBoundsException_entry = generate_ArrayIndexOutOfBounds_handler("java/lang/ArrayIndexOutOfBoundsException");
- Interpreter::_throw_ArrayStoreException_entry = generate_klass_exception_handler("java/lang/ArrayStoreException" );
- Interpreter::_throw_ArithmeticException_entry = generate_exception_handler("java/lang/ArithmeticException" , "/ by zero");
- Interpreter::_throw_ClassCastException_entry = generate_ClassCastException_handler();
- Interpreter::_throw_NullPointerException_entry = generate_exception_handler("java/lang/NullPointerException" , NULL );
- Interpreter::_throw_StackOverflowError_entry = generate_StackOverflowError_handler();
- }
+ { CodeletMark cm(_masm, "throw exception entrypoints");
+ Interpreter::_throw_ArrayIndexOutOfBoundsException_entry = generate_ArrayIndexOutOfBounds_handler("java/lang/ArrayIndexOutOfBoundsException");
+ Interpreter::_throw_ArrayStoreException_entry = generate_klass_exception_handler("java/lang/ArrayStoreException" );
+ Interpreter::_throw_ArithmeticException_entry = generate_exception_handler("java/lang/ArithmeticException" , "/ by zero");
+ Interpreter::_throw_ClassCastException_entry = generate_ClassCastException_handler();
+ Interpreter::_throw_NullPointerException_entry = generate_exception_handler("java/lang/NullPointerException" , NULL );
+ Interpreter::_throw_StackOverflowError_entry = generate_StackOverflowError_handler();
+ }
-#define method_entry(kind) \
- { CodeletMark cm(_masm, "method entry point (kind = " #kind ")"); \
- Interpreter::_entry_table[Interpreter::kind] = ((InterpreterGenerator*)this)->generate_method_entry(Interpreter::kind); \
- }
+#define method_entry(kind) \
+ { CodeletMark cm(_masm, "method entry point (kind = " #kind ")"); \
+ Interpreter::_entry_table[Interpreter::kind] = ((InterpreterGenerator*)this)->generate_method_entry(Interpreter::kind); \
+ }
- // all non-native method kinds
- method_entry(zerolocals)
- method_entry(zerolocals_synchronized)
- method_entry(empty)
- method_entry(accessor)
- method_entry(abstract)
- method_entry(java_lang_math_sin )
- method_entry(java_lang_math_cos )
- method_entry(java_lang_math_tan )
- method_entry(java_lang_math_abs )
- method_entry(java_lang_math_sqrt )
- method_entry(java_lang_math_log )
- method_entry(java_lang_math_log10)
- method_entry(java_lang_math_exp )
- method_entry(java_lang_math_pow )
- method_entry(java_lang_ref_reference_get)
+ // all non-native method kinds
+ method_entry(zerolocals)
+ method_entry(zerolocals_synchronized)
+ method_entry(empty)
+ method_entry(accessor)
+ method_entry(abstract)
+ method_entry(java_lang_math_sin )
+ method_entry(java_lang_math_cos )
+ method_entry(java_lang_math_tan )
+ method_entry(java_lang_math_abs )
+ method_entry(java_lang_math_sqrt )
+ method_entry(java_lang_math_log )
+ method_entry(java_lang_math_log10)
+ method_entry(java_lang_math_exp )
+ method_entry(java_lang_math_pow )
+ method_entry(java_lang_ref_reference_get)
- if (UseCRC32Intrinsics) {
- method_entry(java_util_zip_CRC32_update)
- method_entry(java_util_zip_CRC32_updateBytes)
- method_entry(java_util_zip_CRC32_updateByteBuffer)
- }
+ if (UseCRC32Intrinsics) {
+ method_entry(java_util_zip_CRC32_update)
+ method_entry(java_util_zip_CRC32_updateBytes)
+ method_entry(java_util_zip_CRC32_updateByteBuffer)
+ }
- initialize_method_handle_entries();
+ initialize_method_handle_entries();
- // all native method kinds (must be one contiguous block)
- Interpreter::_native_entry_begin = Interpreter::code()->code_end();
- method_entry(native)
- method_entry(native_synchronized)
- Interpreter::_native_entry_end = Interpreter::code()->code_end();
+ // all native method kinds (must be one contiguous block)
+ Interpreter::_native_entry_begin = Interpreter::code()->code_end();
+ method_entry(native)
+ method_entry(native_synchronized)
+ Interpreter::_native_entry_end = Interpreter::code()->code_end();
#undef method_entry
- // Bytecodes
- set_entry_points_for_all_bytes();
+ // Bytecodes
+ set_entry_points_for_all_bytes();
+ }
+ } while (CodeCacheExtensions::needs_other_interpreter_variant());
+
+ // installation of code in other places in the runtime
+ // (ExecutableCodeManager calls not needed to copy the entries)
set_safepoints_for_all_bytes();
}
@@ -445,6 +478,9 @@
void TemplateInterpreterGenerator::set_entry_points(Bytecodes::Code code) {
+ if (CodeCacheExtensions::skip_template_interpreter_entries(code)) {
+ return;
+ }
CodeletMark cm(_masm, Bytecodes::name(code), code);
// initialize entry points
assert(_unimplemented_bytecode != NULL, "should have been generated before");
@@ -474,6 +510,7 @@
EntryPoint entry(bep, cep, sep, aep, iep, lep, fep, dep, vep);
Interpreter::_normal_table.set_entry(code, entry);
Interpreter::_wentry_point[code] = wep;
+ CodeCacheExtensions::completed_template_interpreter_entries(_masm, code);
}
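
generate_all() above becomes a loop so that an extension can request several interpreter variants, skipping individual bytecode entries per variant. A hedged standalone sketch of that loop contract, with a fake extension (not HotSpot's) that wants two variants:

#include <cstdio>

// Fake extension illustrating the generate_all() loop contract:
// keep generating while another interpreter variant is needed.
struct FakeExtensions {
  int variants_left = 2;
  bool skip_template_interpreter_entries(int bytecode) const {
    // Pure illustration: the second variant only regenerates even bytecodes.
    return variants_left < 2 && bytecode % 2 != 0;
  }
  bool needs_other_interpreter_variant() { return --variants_left > 0; }
};

int main() {
  FakeExtensions ext;
  int variant = 0;
  do {
    ++variant;
    for (int bc = 0; bc < 4; ++bc) {          // stand-in for the full bytecode table
      if (ext.skip_template_interpreter_entries(bc)) continue;
      std::printf("variant %d: generated entries for bytecode %d\n", variant, bc);
    }
  } while (ext.needs_other_interpreter_variant());
  return 0;
}
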
--- a/hotspot/src/share/vm/interpreter/templateInterpreter.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/interpreter/templateInterpreter.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -87,6 +87,7 @@
friend class TemplateInterpreterGenerator;
friend class InterpreterGenerator;
friend class TemplateTable;
+ friend class CodeCacheExtensions;
// friend class Interpreter;
public:
--- a/hotspot/src/share/vm/memory/heap.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/memory/heap.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -28,6 +28,7 @@
#include "code/codeBlob.hpp"
#include "memory/allocation.hpp"
#include "memory/virtualspace.hpp"
+#include "utilities/macros.hpp"
// Blocks
@@ -80,6 +81,7 @@
class CodeHeap : public CHeapObj<mtCode> {
friend class VMStructs;
+ friend class PregeneratedCodeHeap;
private:
VirtualSpace _memory; // the memory holding the blocks
VirtualSpace _segmap; // the memory holding the segment map
@@ -148,8 +150,8 @@
char* high() const { return _memory.high(); }
char* high_boundary() const { return _memory.high_boundary(); }
- bool contains(const void* p) const { return low_boundary() <= p && p < high(); }
- void* find_start(void* p) const; // returns the block containing p or NULL
+ virtual bool contains(const void* p) const { return low_boundary() <= p && p < high(); }
+ virtual void* find_start(void* p) const; // returns the block containing p or NULL
size_t alignment_unit() const; // alignment of any block
size_t alignment_offset() const; // offset of first byte of any block, within the enclosing alignment unit
static size_t header_size(); // returns the header size for each heap block
@@ -158,9 +160,9 @@
int freelist_length() const { return _freelist_length; } // number of elements in the freelist
// returns the first block or NULL
- void* first() const { return next_used(first_block()); }
+ virtual void* first() const { return next_used(first_block()); }
// returns the next block given a block p or NULL
- void* next(void* p) const { return next_used(next_block(block_start(p))); }
+ virtual void* next(void* p) const { return next_used(next_block(block_start(p))); }
// Statistics
size_t capacity() const;
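
contains(), find_start(), first() and next() become virtual and PregeneratedCodeHeap is made a friend; the subclass itself is not part of this change. Purely as an illustration of the kind of heap such a subclass could implement (hypothetical, standalone analogue rather than the real class):

#include <cstddef>
#include <cstdio>

// Standalone analogue of the now-virtual CodeHeap queries: a heap whose
// blocks live in a fixed, pre-mapped region of equally sized blocks.
struct HeapLike {
  virtual ~HeapLike() {}
  virtual bool  contains(const void* p) const = 0;
  virtual void* find_start(void* p)     const = 0;  // block containing p, or null
  virtual void* first()                 const = 0;
  virtual void* next(void* p)           const = 0;
};

class FixedRegionHeap : public HeapLike {
  char*  _base;
  size_t _block_size;
  size_t _block_count;
 public:
  FixedRegionHeap(char* base, size_t block_size, size_t block_count)
    : _base(base), _block_size(block_size), _block_count(block_count) {}
  bool contains(const void* p) const override {
    const char* cp = static_cast<const char*>(p);
    return cp >= _base && cp < _base + _block_size * _block_count;
  }
  void* find_start(void* p) const override {
    if (!contains(p)) return nullptr;
    size_t off = static_cast<char*>(p) - _base;
    return _base + (off / _block_size) * _block_size;  // round down to block start
  }
  void* first() const override { return _block_count != 0 ? _base : nullptr; }
  void* next(void* p) const override {
    char* n = static_cast<char*>(p) + _block_size;
    return contains(n) ? n : nullptr;
  }
};

int main() {
  static char region[4 * 64];
  FixedRegionHeap heap(region, 64, 4);
  int blocks = 0;
  for (void* p = heap.first(); p != nullptr; p = heap.next(p)) ++blocks;
  std::printf("blocks walked: %d\n", blocks);
}
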
--- a/hotspot/src/share/vm/memory/virtualspace.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/memory/virtualspace.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -23,6 +23,7 @@
*/
#include "precompiled.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "memory/virtualspace.hpp"
#include "oops/markOop.hpp"
#include "oops/oop.inline.hpp"
@@ -603,7 +604,7 @@
ReservedCodeSpace::ReservedCodeSpace(size_t r_size,
size_t rs_align,
bool large) :
- ReservedSpace(r_size, rs_align, large, /*executable*/ true) {
+ ReservedSpace(r_size, rs_align, large, /*executable*/ CodeCacheExtensions::support_dynamic_code()) {
MemTracker::record_virtual_memory_type((address)base(), mtCode);
}
--- a/hotspot/src/share/vm/opto/regmask.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/opto/regmask.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -25,6 +25,8 @@
#include "precompiled.hpp"
#include "opto/ad.hpp"
#include "opto/compile.hpp"
+#include "opto/matcher.hpp"
+#include "opto/node.hpp"
#include "opto/regmask.hpp"
#define RM_SIZE _RM_SIZE /* a constant private to the class RegMask */
--- a/hotspot/src/share/vm/precompiled/precompiled.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/precompiled/precompiled.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -65,6 +65,7 @@
# include "classfile/vmSymbols.hpp"
# include "code/codeBlob.hpp"
# include "code/codeCache.hpp"
+# include "code/codeCacheExtensions.hpp"
# include "code/compressedStream.hpp"
# include "code/debugInfo.hpp"
# include "code/debugInfoRec.hpp"
--- a/hotspot/src/share/vm/prims/methodHandles.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/prims/methodHandles.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -26,6 +26,7 @@
#include "classfile/javaClasses.inline.hpp"
#include "classfile/stringTable.hpp"
#include "code/codeCache.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "compiler/compileBroker.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/oopMapCache.hpp"
@@ -100,6 +101,7 @@
StubCodeMark mark(this, "MethodHandle::interpreter_entry", vmIntrinsics::name_at(iid));
address entry = MethodHandles::generate_method_handle_interpreter_entry(_masm, iid);
if (entry != NULL) {
+ CodeCacheExtensions::handle_generated_pc(entry, vmIntrinsics::name_at(iid));
Interpreter::set_entry_for_kind(mk, entry);
}
// If the entry is not set, it will throw AbstractMethodError.
--- a/hotspot/src/share/vm/runtime/arguments.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/runtime/arguments.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -27,6 +27,7 @@
#include "classfile/javaAssertions.hpp"
#include "classfile/stringTable.hpp"
#include "classfile/symbolTable.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "compiler/compilerOracle.hpp"
#include "gc/shared/cardTableRS.hpp"
#include "gc/shared/genCollectedHeap.hpp"
@@ -1586,6 +1587,8 @@
// Set up runtime image flags.
set_runtime_image_flags();
+
+ CodeCacheExtensions::set_ergonomics_flags();
}
void Arguments::set_parallel_gc_flags() {
--- a/hotspot/src/share/vm/runtime/arguments.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/runtime/arguments.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -224,6 +224,7 @@
class Arguments : AllStatic {
friend class VMStructs;
friend class JvmtiExport;
+ friend class CodeCacheExtensions;
public:
// Operation modi
enum Mode {
--- a/hotspot/src/share/vm/runtime/init.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/runtime/init.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -24,6 +24,7 @@
#include "precompiled.hpp"
#include "classfile/stringTable.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "code/icBuffer.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "interpreter/bytecodes.hpp"
@@ -101,15 +102,20 @@
classLoader_init();
compilationPolicy_init();
codeCache_init();
+ CodeCacheExtensions::initialize();
VM_Version_init();
+ CodeCacheExtensions::complete_step(CodeCacheExtensionsSteps::VMVersion);
os_init_globals();
stubRoutines_init1();
+ CodeCacheExtensions::complete_step(CodeCacheExtensionsSteps::StubRoutines1);
jint status = universe_init(); // dependent on codeCache_init and
// stubRoutines_init1 and metaspace_init.
if (status != JNI_OK)
return status;
+ CodeCacheExtensions::complete_step(CodeCacheExtensionsSteps::Universe);
interpreter_init(); // before any methods loaded
+ CodeCacheExtensions::complete_step(CodeCacheExtensionsSteps::Interpreter);
invocationCounter_init(); // before any methods loaded
marksweep_init();
accessFlags_init();
@@ -137,6 +143,7 @@
}
javaClasses_init(); // must happen after vtable initialization
stubRoutines_init2(); // note: StubRoutines need 2-phase init
+ CodeCacheExtensions::complete_step(CodeCacheExtensionsSteps::StubRoutines2);
#if INCLUDE_NMT
// Solaris stack is walkable only after stubRoutines are set up.
@@ -150,6 +157,7 @@
CommandLineFlags::printFlags(tty, false, PrintFlagsRanges);
}
+ CodeCacheExtensions::complete_step(CodeCacheExtensionsSteps::InitGlobals);
return JNI_OK;
}
--- a/hotspot/src/share/vm/runtime/sharedRuntime.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/runtime/sharedRuntime.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -27,6 +27,7 @@
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "code/scopeDesc.hpp"
#include "code/vtableStubs.hpp"
#include "compiler/abstractCompiler.hpp"
@@ -2307,19 +2308,35 @@
return _buffer;
}
+extern "C" void unexpected_adapter_call() {
+ ShouldNotCallThis();
+}
+
void AdapterHandlerLibrary::initialize() {
if (_adapters != NULL) return;
_adapters = new AdapterHandlerTable();
- // Create a special handler for abstract methods. Abstract methods
- // are never compiled so an i2c entry is somewhat meaningless, but
- // throw AbstractMethodError just in case.
- // Pass wrong_method_abstract for the c2i transitions to return
- // AbstractMethodError for invalid invocations.
- address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
- _abstract_method_handler = AdapterHandlerLibrary::new_entry(new AdapterFingerPrint(0, NULL),
- StubRoutines::throw_AbstractMethodError_entry(),
- wrong_method_abstract, wrong_method_abstract);
+ if (!CodeCacheExtensions::skip_compiler_support()) {
+ // Create a special handler for abstract methods. Abstract methods
+ // are never compiled so an i2c entry is somewhat meaningless, but
+ // throw AbstractMethodError just in case.
+ // Pass wrong_method_abstract for the c2i transitions to return
+ // AbstractMethodError for invalid invocations.
+ address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
+ _abstract_method_handler = AdapterHandlerLibrary::new_entry(new AdapterFingerPrint(0, NULL),
+ StubRoutines::throw_AbstractMethodError_entry(),
+ wrong_method_abstract, wrong_method_abstract);
+ } else {
+ // Adapters are not supposed to be used.
+ // Generate a special one to cause an error if used (and store this
+ // singleton in place of the otherwise useless _abstract_method_handler).
+ address entry = (address) &unexpected_adapter_call;
+ _abstract_method_handler = AdapterHandlerLibrary::new_entry(new AdapterFingerPrint(0, NULL),
+ entry,
+ entry,
+ entry);
+
+ }
}
AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint,
@@ -2346,6 +2363,15 @@
// make sure data structure is initialized
initialize();
+ if (CodeCacheExtensions::skip_compiler_support()) {
+ // Adapters are useless and should not be used, including the
+ // abstract_method_handler. However, some callers check that
+ // an adapter was installed.
+ // Return the singleton adapter, stored in _abstract_method_handler
+ // and modified to cause an error if it is ever called.
+ return _abstract_method_handler;
+ }
+
if (method->is_abstract()) {
return _abstract_method_handler;
}
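
With compiler support skipped, adapters must never run, yet callers of get_adapter() still expect a non-NULL result, so a single handler is installed whose every entry point leads to unexpected_adapter_call(). A rough standalone sketch of that poisoned-singleton pattern, using simplified stand-ins rather than HotSpot's AdapterHandlerEntry and AdapterHandlerTable:

// A hedged sketch of the pattern only; AdapterHandlerEntry here is a
// simplified stand-in, not HotSpot's class.
#include <cstdio>
#include <cstdlib>

typedef unsigned char* address;

extern "C" void unexpected_adapter_call() {
  std::fprintf(stderr, "adapter used although compiler support is disabled\n");
  std::abort();
}

struct AdapterHandlerEntry {
  address i2c_entry;
  address c2i_entry;
  address c2i_unverified_entry;
};

static AdapterHandlerEntry* abstract_method_handler() {
  // One shared entry whose every address traps if it is ever reached.
  static AdapterHandlerEntry singleton = {
    (address) &unexpected_adapter_call,
    (address) &unexpected_adapter_call,
    (address) &unexpected_adapter_call
  };
  return &singleton;  // callers only check that some adapter is installed
}

int main() {
  AdapterHandlerEntry* entry = abstract_method_handler();
  std::printf("singleton adapter at %p; calling any entry would abort\n", (void*) entry);
  return 0;
}
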
--- a/hotspot/src/share/vm/runtime/stubCodeGenerator.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/runtime/stubCodeGenerator.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -26,6 +26,7 @@
#include "asm/macroAssembler.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/codeCache.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "compiler/disassembler.hpp"
#include "oops/oop.inline.hpp"
#include "prims/forte.hpp"
--- a/hotspot/src/share/vm/runtime/stubCodeGenerator.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/runtime/stubCodeGenerator.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -67,14 +67,14 @@
static StubCodeDesc* desc_for_index(int); // returns the code descriptor for the index or NULL
static const char* name_for(address pc); // returns the name of the code containing pc or NULL
- StubCodeDesc(const char* group, const char* name, address begin) {
+ StubCodeDesc(const char* group, const char* name, address begin, address end = NULL) {
assert(name != NULL, "no name specified");
_next = _list;
_group = group;
_name = name;
_index = ++_count; // (never zero)
_begin = begin;
- _end = NULL;
+ _end = end;
_list = this;
};
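
The defaulted end parameter lets a StubCodeDesc be created with both bounds already known, which is what a pregenerated stub needs, while freshly generated stubs keep the old begin-only construction and have their end recorded when generation finishes. A simplified stand-in class, not HotSpot's StubCodeDesc, showing the idea:

// A hedged, simplified stand-in for StubCodeDesc; only the constructor
// shape mirrors the change above.
#include <cassert>
#include <cstdio>

typedef unsigned char* address;

class StubDesc {
 private:
  const char* _name;
  address     _begin;
  address     _end;
 public:
  StubDesc(const char* name, address begin, address end = NULL) {
    assert(name != NULL && "no name specified");
    _name  = name;
    _begin = begin;
    _end   = end;    // NULL means "still being generated"
  }
  void set_end(address end)  { _end = end; }
  size_t size() const        { return (size_t) (_end - _begin); }
  const char* name() const   { return _name; }
};

int main() {
  static unsigned char pregenerated[64];  // stands in for already mapped stub code
  StubDesc d("call_stub", pregenerated, pregenerated + sizeof(pregenerated));
  std::printf("%s: %zu bytes, bounds known up front\n", d.name(), d.size());
  return 0;
}
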
--- a/hotspot/src/share/vm/runtime/stubRoutines.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/runtime/stubRoutines.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -24,6 +24,7 @@
#include "precompiled.hpp"
#include "asm/codeBuffer.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "memory/resourceArea.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/interfaceSupport.hpp"
@@ -182,6 +183,12 @@
// simple tests of generated arraycopy functions
static void test_arraycopy_func(address func, int alignment) {
+ if (CodeCacheExtensions::use_pregenerated_interpreter() || !CodeCacheExtensions::is_executable(func)) {
+ // Exit safely if stubs were generated but cannot be used.
+ // Also skip the test when using a pregenerated interpreter, since the code
+ // may depend on some registers being properly initialized (for instance Rthread).
+ return;
+ }
int v = 0xcc;
int v2 = 0x11;
jlong lbuffer[8];
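
The guard added to test_arraycopy_func() keeps this C++ self-test from jumping into stub code that exists but cannot safely be executed from here, either because the memory is not executable in this context or because pregenerated interpreter code assumes registers such as Rthread are already set up. A sketch of the same check-before-call shape, with a stand-in for the executability query:

// A hedged sketch of the guard shape; is_executable() is a stand-in for
// the CodeCacheExtensions query, not its real implementation.
#include <cstdio>

typedef void (*arraycopy_func_t)(void* from, void* to, int count);

static bool is_executable(arraycopy_func_t func) {
  // A real implementation would consult the code cache mapping; this
  // stand-in simply reports that the code cannot be run from here.
  (void) func;
  return false;
}

static void test_arraycopy(arraycopy_func_t func) {
  if (func == NULL || !is_executable(func)) {
    return;  // exit safely: the stub exists but must not be called here
  }
  int src[4] = {1, 2, 3, 4};
  int dst[4] = {0, 0, 0, 0};
  func(src, dst, 4);
}

int main() {
  test_arraycopy(NULL);  // skipped, as it would be for a non-executable stub
  std::printf("self-test skipped safely\n");
  return 0;
}
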
--- a/hotspot/src/share/vm/runtime/thread.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/runtime/thread.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -28,6 +28,7 @@
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "code/scopeDesc.hpp"
#include "compiler/compileBroker.hpp"
#include "gc/shared/gcLocker.inline.hpp"
@@ -3587,6 +3588,8 @@
}
}
+ CodeCacheExtensions::complete_step(CodeCacheExtensionsSteps::CreateVM);
+
create_vm_timer.end();
#ifdef ASSERT
_vm_complete = true;
--- a/hotspot/src/share/vm/runtime/vmStructs.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/runtime/vmStructs.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -182,6 +182,8 @@
#include "runtime/vmStructs_trace.hpp"
#endif
+#include "runtime/vmStructs_ext.hpp"
+
#ifdef COMPILER2
#include "opto/addnode.hpp"
#include "opto/block.hpp"
@@ -2961,6 +2963,9 @@
GENERATE_STATIC_VM_STRUCT_ENTRY)
#endif
+ VM_STRUCTS_EXT(GENERATE_NONSTATIC_VM_STRUCT_ENTRY,
+ GENERATE_STATIC_VM_STRUCT_ENTRY)
+
VM_STRUCTS_CPU(GENERATE_NONSTATIC_VM_STRUCT_ENTRY,
GENERATE_STATIC_VM_STRUCT_ENTRY,
GENERATE_UNCHECKED_NONSTATIC_VM_STRUCT_ENTRY,
@@ -3011,6 +3016,9 @@
GENERATE_TOPLEVEL_VM_TYPE_ENTRY)
#endif
+ VM_TYPES_EXT(GENERATE_VM_TYPE_ENTRY,
+ GENERATE_TOPLEVEL_VM_TYPE_ENTRY)
+
VM_TYPES_CPU(GENERATE_VM_TYPE_ENTRY,
GENERATE_TOPLEVEL_VM_TYPE_ENTRY,
GENERATE_OOP_VM_TYPE_ENTRY,
@@ -3120,6 +3128,9 @@
CHECK_STATIC_VM_STRUCT_ENTRY);
#endif
+ VM_STRUCTS_EXT(CHECK_NONSTATIC_VM_STRUCT_ENTRY,
+ CHECK_STATIC_VM_STRUCT_ENTRY);
+
VM_STRUCTS_CPU(CHECK_NONSTATIC_VM_STRUCT_ENTRY,
CHECK_STATIC_VM_STRUCT_ENTRY,
CHECK_NO_OP,
@@ -3166,6 +3177,9 @@
CHECK_SINGLE_ARG_VM_TYPE_NO_OP);
#endif
+ VM_TYPES_EXT(CHECK_VM_TYPE_ENTRY,
+ CHECK_SINGLE_ARG_VM_TYPE_NO_OP);
+
VM_TYPES_CPU(CHECK_VM_TYPE_ENTRY,
CHECK_SINGLE_ARG_VM_TYPE_NO_OP,
CHECK_SINGLE_ARG_VM_TYPE_NO_OP,
@@ -3234,6 +3248,9 @@
ENSURE_FIELD_TYPE_PRESENT));
#endif
+ debug_only(VM_STRUCTS_EXT(ENSURE_FIELD_TYPE_PRESENT,
+ ENSURE_FIELD_TYPE_PRESENT));
+
debug_only(VM_STRUCTS_CPU(ENSURE_FIELD_TYPE_PRESENT,
ENSURE_FIELD_TYPE_PRESENT,
CHECK_NO_OP,
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/runtime/vmStructs_ext.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_VM_RUNTIME_VMSTRUCTS_EXT_HPP
+#define SHARE_VM_RUNTIME_VMSTRUCTS_EXT_HPP
+
+#define VM_STRUCTS_EXT(a, b)
+
+#define VM_TYPES_EXT(a, b)
+
+
+#endif // SHARE_VM_RUNTIME_VMSTRUCTS_EXT_HPP
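
Both macros are deliberately empty in this default header, so the VM_STRUCTS_EXT and VM_TYPES_EXT call sites added to vmStructs.cpp expand to nothing; a build that replaces vmStructs_ext.hpp can add serviceability-agent entries without touching vmStructs.cpp itself. A tiny self-contained illustration of that X-macro hook, with made-up names:

// A hedged illustration of the hook pattern only; MY_STRUCTS and its
// entries are invented names, not HotSpot's tables.
#include <cstdio>

// Default (empty) extension hook; an extended build would add entries here.
#ifndef MY_STRUCTS_EXT
#define MY_STRUCTS_EXT(nonstatic_field)
#endif

// Core table plus the extension hook, expanded with a caller-chosen generator.
#define MY_STRUCTS(nonstatic_field)              \
  nonstatic_field("CodeCache", "_heaps")         \
  MY_STRUCTS_EXT(nonstatic_field)

#define PRINT_ENTRY(type_name, field_name) \
  std::printf("%s::%s\n", type_name, field_name);

int main() {
  MY_STRUCTS(PRINT_ENTRY)   // expands to one printf per table entry
  return 0;
}
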
--- a/hotspot/src/share/vm/runtime/vm_operations.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/runtime/vm_operations.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -26,6 +26,7 @@
#include "classfile/symbolTable.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compilerOracle.hpp"
#include "gc/shared/isGCActiveMark.hpp"
@@ -369,6 +370,8 @@
Thread * VM_Exit::_shutdown_thread = NULL;
int VM_Exit::set_vm_exited() {
+ CodeCacheExtensions::complete_step(CodeCacheExtensionsSteps::LastStep);
+
Thread * thr_cur = ThreadLocalStorage::get_thread_slow();
assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint already");
--- a/hotspot/src/share/vm/runtime/vm_version.cpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/runtime/vm_version.cpp Wed Jul 01 10:53:26 2015 +0200
@@ -23,6 +23,7 @@
*/
#include "precompiled.hpp"
+#include "code/codeCacheExtensions.hpp"
#include "memory/universe.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/arguments.hpp"
@@ -155,6 +156,9 @@
const char* Abstract_VM_Version::vm_info_string() {
+ if (CodeCacheExtensions::use_pregenerated_interpreter()) {
+ return "interpreted mode, pregenerated";
+ }
switch (Arguments::mode()) {
case Arguments::_int:
return UseSharedSpaces ? "interpreted mode, sharing" : "interpreted mode";
--- a/hotspot/src/share/vm/utilities/globalDefinitions.hpp Tue Jun 30 15:26:20 2015 -0700
+++ b/hotspot/src/share/vm/utilities/globalDefinitions.hpp Wed Jul 01 10:53:26 2015 +0200
@@ -1367,6 +1367,7 @@
#define UINT32_FORMAT_W(width) "%" #width PRIu32
#define PTR32_FORMAT "0x%08" PRIx32
+#define PTR32_FORMAT_W(width) "0x%" #width PRIx32
// Format 64-bit quantities.
#define INT64_FORMAT "%" PRId64
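
PTR32_FORMAT hard-codes a zero-padded width of 8, while the new PTR32_FORMAT_W lets the caller pick the field width at the use site, mirroring the existing _W variants of the other format macros. A short compile-check of the expansion, using standalone redefinitions so the snippet builds outside HotSpot:

// A hedged sketch; the macros are redefined locally so this compiles on its own.
#include <cinttypes>
#include <cstdio>

#define PTR32_FORMAT          "0x%08"      PRIx32
#define PTR32_FORMAT_W(width) "0x%" #width PRIx32

int main() {
  uint32_t p = 0xCAFE;
  std::printf(PTR32_FORMAT "\n", p);        // fixed zero-padded width of 8
  std::printf(PTR32_FORMAT_W(12) "\n", p);  // caller-chosen field width
  return 0;
}
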