--- a/hotspot/agent/src/share/classes/sun/jvm/hotspot/oops/ConstantPool.java Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/agent/src/share/classes/sun/jvm/hotspot/oops/ConstantPool.java Fri Nov 09 08:20:03 2012 -0800
@@ -121,7 +121,7 @@
Address addr = cache.getValue(getAddress());
return (ConstantPoolCache) VMObjectFactory.newObject(ConstantPoolCache.class, addr);
}
- public Klass getPoolHolder() { return (Klass) poolHolder.getValue(this); }
+ public InstanceKlass getPoolHolder() { return (InstanceKlass)poolHolder.getValue(this); }
public int getLength() { return (int)length.getValue(getAddress()); }
public Oop getResolvedReferences() {
Address handle = resolvedReferences.getValue(getAddress());
--- a/hotspot/agent/src/share/classes/sun/jvm/hotspot/oops/Method.java Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/agent/src/share/classes/sun/jvm/hotspot/oops/Method.java Fri Nov 09 08:20:03 2012 -0800
@@ -177,7 +177,7 @@
bci. It is required that there is currently a bytecode at this
bci. */
public int getOrigBytecodeAt(int bci) {
- BreakpointInfo bp = ((InstanceKlass) getMethodHolder()).getBreakpoints();
+ BreakpointInfo bp = getMethodHolder().getBreakpoints();
for (; bp != null; bp = bp.getNext()) {
if (bp.match(this, bci)) {
return bp.getOrigBytecode();
@@ -238,7 +238,7 @@
}
// Method holder (the Klass holding this method)
- public Klass getMethodHolder() { return getConstants().getPoolHolder(); }
+ public InstanceKlass getMethodHolder() { return getConstants().getPoolHolder(); }
// Access flags
public boolean isPublic() { return getAccessFlagsObj().isPublic(); }
--- a/hotspot/make/hotspot_version Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/make/hotspot_version Fri Nov 09 08:20:03 2012 -0800
@@ -35,7 +35,7 @@
HS_MAJOR_VER=25
HS_MINOR_VER=0
-HS_BUILD_NUMBER=08
+HS_BUILD_NUMBER=09
JDK_MAJOR_VER=1
JDK_MINOR_VER=8
--- a/hotspot/make/linux/makefiles/defs.make Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/make/linux/makefiles/defs.make Fri Nov 09 08:20:03 2012 -0800
@@ -170,68 +170,70 @@
# overridden in some situations, e.g., a BUILD_FLAVOR != product
# build.
- ifeq ($(BUILD_FLAVOR), product)
- FULL_DEBUG_SYMBOLS ?= 1
- ENABLE_FULL_DEBUG_SYMBOLS = $(FULL_DEBUG_SYMBOLS)
- else
- # debug variants always get Full Debug Symbols (if available)
- ENABLE_FULL_DEBUG_SYMBOLS = 1
- endif
- _JUNK_ := $(shell \
- echo >&2 "INFO: ENABLE_FULL_DEBUG_SYMBOLS=$(ENABLE_FULL_DEBUG_SYMBOLS)")
- # since objcopy is optional, we set ZIP_DEBUGINFO_FILES later
+ # Due to the multiple sub-make processes that occur, this logic gets
+ # executed multiple times. We reduce the noise by at least checking that
+ # BUILD_FLAVOR has been set.
+ ifneq ($(BUILD_FLAVOR),)
+ ifeq ($(BUILD_FLAVOR), product)
+ FULL_DEBUG_SYMBOLS ?= 1
+ ENABLE_FULL_DEBUG_SYMBOLS = $(FULL_DEBUG_SYMBOLS)
+ else
+ # debug variants always get Full Debug Symbols (if available)
+ ENABLE_FULL_DEBUG_SYMBOLS = 1
+ endif
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: ENABLE_FULL_DEBUG_SYMBOLS=$(ENABLE_FULL_DEBUG_SYMBOLS)")
+ # since objcopy is optional, we set ZIP_DEBUGINFO_FILES later
- ifeq ($(ENABLE_FULL_DEBUG_SYMBOLS),1)
- # Default OBJCOPY comes from GNU Binutils on Linux:
- DEF_OBJCOPY=/usr/bin/objcopy
- ifdef CROSS_COMPILE_ARCH
- # don't try to generate .debuginfo files when cross compiling
- _JUNK_ := $(shell \
- echo >&2 "INFO: cross compiling for ARCH $(CROSS_COMPILE_ARCH)," \
- "skipping .debuginfo generation.")
- OBJCOPY=
- else
+ ifeq ($(ENABLE_FULL_DEBUG_SYMBOLS),1)
+ # Default OBJCOPY comes from GNU Binutils on Linux
+ ifeq ($(CROSS_COMPILE_ARCH),)
+ DEF_OBJCOPY=/usr/bin/objcopy
+ else
+ # Assume objcopy is part of the cross-compilation toolset
+ ifneq ($(ALT_COMPILER_PATH),)
+ DEF_OBJCOPY=$(ALT_COMPILER_PATH)/objcopy
+ endif
+ endif
OBJCOPY=$(shell test -x $(DEF_OBJCOPY) && echo $(DEF_OBJCOPY))
ifneq ($(ALT_OBJCOPY),)
_JUNK_ := $(shell echo >&2 "INFO: ALT_OBJCOPY=$(ALT_OBJCOPY)")
OBJCOPY=$(shell test -x $(ALT_OBJCOPY) && echo $(ALT_OBJCOPY))
endif
- endif
- else
- OBJCOPY=
- endif
- ifeq ($(OBJCOPY),)
- _JUNK_ := $(shell \
- echo >&2 "INFO: no objcopy cmd found so cannot create .debuginfo files.")
- ENABLE_FULL_DEBUG_SYMBOLS=0
- _JUNK_ := $(shell \
- echo >&2 "INFO: ENABLE_FULL_DEBUG_SYMBOLS=$(ENABLE_FULL_DEBUG_SYMBOLS)")
- else
- _JUNK_ := $(shell \
- echo >&2 "INFO: $(OBJCOPY) cmd found so will create .debuginfo files.")
+ ifeq ($(OBJCOPY),)
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: no objcopy cmd found so cannot create .debuginfo files. You may need to set ALT_OBJCOPY.")
+ ENABLE_FULL_DEBUG_SYMBOLS=0
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: ENABLE_FULL_DEBUG_SYMBOLS=$(ENABLE_FULL_DEBUG_SYMBOLS)")
+ else
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: $(OBJCOPY) cmd found so will create .debuginfo files.")
- # Library stripping policies for .debuginfo configs:
- # all_strip - strips everything from the library
- # min_strip - strips most stuff from the library; leaves minimum symbols
- # no_strip - does not strip the library at all
- #
- # Oracle security policy requires "all_strip". A waiver was granted on
- # 2011.09.01 that permits using "min_strip" in the Java JDK and Java JRE.
- #
- # Currently, STRIP_POLICY is only used when Full Debug Symbols is enabled.
- #
- STRIP_POLICY ?= min_strip
+ # Library stripping policies for .debuginfo configs:
+ # all_strip - strips everything from the library
+ # min_strip - strips most stuff from the library; leaves minimum symbols
+ # no_strip - does not strip the library at all
+ #
+ # Oracle security policy requires "all_strip". A waiver was granted on
+ # 2011.09.01 that permits using "min_strip" in the Java JDK and Java JRE.
+ #
+ # Currently, STRIP_POLICY is only used when Full Debug Symbols is enabled.
+ #
+ STRIP_POLICY ?= min_strip
- _JUNK_ := $(shell \
- echo >&2 "INFO: STRIP_POLICY=$(STRIP_POLICY)")
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: STRIP_POLICY=$(STRIP_POLICY)")
- ZIP_DEBUGINFO_FILES ?= 1
+ ZIP_DEBUGINFO_FILES ?= 1
- _JUNK_ := $(shell \
- echo >&2 "INFO: ZIP_DEBUGINFO_FILES=$(ZIP_DEBUGINFO_FILES)")
- endif
-endif
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: ZIP_DEBUGINFO_FILES=$(ZIP_DEBUGINFO_FILES)")
+ endif
+ endif # ENABLE_FULL_DEBUG_SYMBOLS=1
+ endif # BUILD_FLAVOR
+endif # JDK_6_OR_EARLIER
JDK_INCLUDE_SUBDIR=linux
--- a/hotspot/make/linux/makefiles/vm.make Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/make/linux/makefiles/vm.make Fri Nov 09 08:20:03 2012 -0800
@@ -336,24 +336,23 @@
fi \
fi \
}
-ifeq ($(CROSS_COMPILE_ARCH),)
- ifeq ($(ENABLE_FULL_DEBUG_SYMBOLS),1)
+
+ifeq ($(ENABLE_FULL_DEBUG_SYMBOLS),1)
$(QUIETLY) $(OBJCOPY) --only-keep-debug $@ $(LIBJVM_DEBUGINFO)
$(QUIETLY) $(OBJCOPY) --add-gnu-debuglink=$(LIBJVM_DEBUGINFO) $@
- ifeq ($(STRIP_POLICY),all_strip)
+ ifeq ($(STRIP_POLICY),all_strip)
$(QUIETLY) $(STRIP) $@
- else
- ifeq ($(STRIP_POLICY),min_strip)
+ else
+ ifeq ($(STRIP_POLICY),min_strip)
$(QUIETLY) $(STRIP) -g $@
- # implied else here is no stripping at all
- endif
+ # implied else here is no stripping at all
endif
+ endif
$(QUIETLY) [ -f $(LIBJVM_G_DEBUGINFO) ] || ln -s $(LIBJVM_DEBUGINFO) $(LIBJVM_G_DEBUGINFO)
- ifeq ($(ZIP_DEBUGINFO_FILES),1)
+ ifeq ($(ZIP_DEBUGINFO_FILES),1)
$(ZIPEXE) -q -y $(LIBJVM_DIZ) $(LIBJVM_DEBUGINFO) $(LIBJVM_G_DEBUGINFO)
$(RM) $(LIBJVM_DEBUGINFO) $(LIBJVM_G_DEBUGINFO)
[ -f $(LIBJVM_G_DIZ) ] || { ln -s $(LIBJVM_DIZ) $(LIBJVM_G_DIZ); }
- endif
endif
endif
--- a/hotspot/make/solaris/makefiles/defs.make Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/make/solaris/makefiles/defs.make Fri Nov 09 08:20:03 2012 -0800
@@ -109,60 +109,63 @@
# overridden in some situations, e.g., a BUILD_FLAVOR != product
# build.
- ifeq ($(BUILD_FLAVOR), product)
- FULL_DEBUG_SYMBOLS ?= 1
- ENABLE_FULL_DEBUG_SYMBOLS = $(FULL_DEBUG_SYMBOLS)
- else
- # debug variants always get Full Debug Symbols (if available)
- ENABLE_FULL_DEBUG_SYMBOLS = 1
- endif
- _JUNK_ := $(shell \
- echo >&2 "INFO: ENABLE_FULL_DEBUG_SYMBOLS=$(ENABLE_FULL_DEBUG_SYMBOLS)")
- # since objcopy is optional, we set ZIP_DEBUGINFO_FILES later
-
- ifeq ($(ENABLE_FULL_DEBUG_SYMBOLS),1)
- # Default OBJCOPY comes from the SUNWbinutils package:
- DEF_OBJCOPY=/usr/sfw/bin/gobjcopy
- OBJCOPY=$(shell test -x $(DEF_OBJCOPY) && echo $(DEF_OBJCOPY))
- ifneq ($(ALT_OBJCOPY),)
- _JUNK_ := $(shell echo >&2 "INFO: ALT_OBJCOPY=$(ALT_OBJCOPY)")
- OBJCOPY=$(shell test -x $(ALT_OBJCOPY) && echo $(ALT_OBJCOPY))
+ # Due to the multiple sub-make processes that occur, this logic gets
+ # executed multiple times. We reduce the noise by at least checking that
+ # BUILD_FLAVOR has been set.
+ ifneq ($(BUILD_FLAVOR),)
+ ifeq ($(BUILD_FLAVOR), product)
+ FULL_DEBUG_SYMBOLS ?= 1
+ ENABLE_FULL_DEBUG_SYMBOLS = $(FULL_DEBUG_SYMBOLS)
+ else
+ # debug variants always get Full Debug Symbols (if available)
+ ENABLE_FULL_DEBUG_SYMBOLS = 1
endif
- else
- OBJCOPY=
- endif
-
- ifeq ($(OBJCOPY),)
- _JUNK_ := $(shell \
- echo >&2 "INFO: no objcopy cmd found so cannot create .debuginfo files.")
- ENABLE_FULL_DEBUG_SYMBOLS=0
_JUNK_ := $(shell \
echo >&2 "INFO: ENABLE_FULL_DEBUG_SYMBOLS=$(ENABLE_FULL_DEBUG_SYMBOLS)")
- else
- _JUNK_ := $(shell \
- echo >&2 "INFO: $(OBJCOPY) cmd found so will create .debuginfo files.")
+ # since objcopy is optional, we set ZIP_DEBUGINFO_FILES later
+
+ ifeq ($(ENABLE_FULL_DEBUG_SYMBOLS),1)
+ # Default OBJCOPY comes from the SUNWbinutils package:
+ DEF_OBJCOPY=/usr/sfw/bin/gobjcopy
+ OBJCOPY=$(shell test -x $(DEF_OBJCOPY) && echo $(DEF_OBJCOPY))
+ ifneq ($(ALT_OBJCOPY),)
+ _JUNK_ := $(shell echo >&2 "INFO: ALT_OBJCOPY=$(ALT_OBJCOPY)")
+ OBJCOPY=$(shell test -x $(ALT_OBJCOPY) && echo $(ALT_OBJCOPY))
+ endif
+
+ ifeq ($(OBJCOPY),)
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: no objcopy cmd found so cannot create .debuginfo files.")
+ ENABLE_FULL_DEBUG_SYMBOLS=0
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: ENABLE_FULL_DEBUG_SYMBOLS=$(ENABLE_FULL_DEBUG_SYMBOLS)")
+ else
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: $(OBJCOPY) cmd found so will create .debuginfo files.")
- # Library stripping policies for .debuginfo configs:
- # all_strip - strips everything from the library
- # min_strip - strips most stuff from the library; leaves minimum symbols
- # no_strip - does not strip the library at all
- #
- # Oracle security policy requires "all_strip". A waiver was granted on
- # 2011.09.01 that permits using "min_strip" in the Java JDK and Java JRE.
- #
- # Currently, STRIP_POLICY is only used when Full Debug Symbols is enabled.
- #
- STRIP_POLICY ?= min_strip
+ # Library stripping policies for .debuginfo configs:
+ # all_strip - strips everything from the library
+ # min_strip - strips most stuff from the library; leaves minimum symbols
+ # no_strip - does not strip the library at all
+ #
+ # Oracle security policy requires "all_strip". A waiver was granted on
+ # 2011.09.01 that permits using "min_strip" in the Java JDK and Java JRE.
+ #
+ # Currently, STRIP_POLICY is only used when Full Debug Symbols is enabled.
+ #
+ STRIP_POLICY ?= min_strip
- _JUNK_ := $(shell \
- echo >&2 "INFO: STRIP_POLICY=$(STRIP_POLICY)")
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: STRIP_POLICY=$(STRIP_POLICY)")
- ZIP_DEBUGINFO_FILES ?= 1
+ ZIP_DEBUGINFO_FILES ?= 1
- _JUNK_ := $(shell \
- echo >&2 "INFO: ZIP_DEBUGINFO_FILES=$(ZIP_DEBUGINFO_FILES)")
- endif
-endif
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: ZIP_DEBUGINFO_FILES=$(ZIP_DEBUGINFO_FILES)")
+ endif
+ endif # ENABLE_FULL_DEBUG_SYMBOLS=1
+ endif # BUILD_FLAVOR
+endif # JDK_6_OR_EARLIER
JDK_INCLUDE_SUBDIR=solaris
--- a/hotspot/make/windows/makefiles/defs.make Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/make/windows/makefiles/defs.make Fri Nov 09 08:20:03 2012 -0800
@@ -131,23 +131,29 @@
# overridden in some situations, e.g., a BUILD_FLAVOR != product
# build.
-ifeq ($(BUILD_FLAVOR), product)
- FULL_DEBUG_SYMBOLS ?= 1
- ENABLE_FULL_DEBUG_SYMBOLS = $(FULL_DEBUG_SYMBOLS)
-else
- # debug variants always get Full Debug Symbols (if available)
- ENABLE_FULL_DEBUG_SYMBOLS = 1
+# Due to the multiple sub-make processes that occur, this logic gets
+# executed multiple times. We reduce the noise by at least checking that
+# BUILD_FLAVOR has been set.
+ifneq ($(BUILD_FLAVOR),)
+ ifeq ($(BUILD_FLAVOR), product)
+ FULL_DEBUG_SYMBOLS ?= 1
+ ENABLE_FULL_DEBUG_SYMBOLS = $(FULL_DEBUG_SYMBOLS)
+ else
+ # debug variants always get Full Debug Symbols (if available)
+ ENABLE_FULL_DEBUG_SYMBOLS = 1
+ endif
+ _JUNK_ := $(shell \
+ echo >&2 "INFO: ENABLE_FULL_DEBUG_SYMBOLS=$(ENABLE_FULL_DEBUG_SYMBOLS)")
+ MAKE_ARGS += ENABLE_FULL_DEBUG_SYMBOLS=$(ENABLE_FULL_DEBUG_SYMBOLS)
+
+ ifeq ($(ENABLE_FULL_DEBUG_SYMBOLS),1)
+ ZIP_DEBUGINFO_FILES ?= 1
+ else
+ ZIP_DEBUGINFO_FILES=0
+ endif
+ MAKE_ARGS += ZIP_DEBUGINFO_FILES=$(ZIP_DEBUGINFO_FILES)
endif
-_JUNK_ := $(shell \
- echo >&2 "INFO: ENABLE_FULL_DEBUG_SYMBOLS=$(ENABLE_FULL_DEBUG_SYMBOLS)")
-MAKE_ARGS += ENABLE_FULL_DEBUG_SYMBOLS=$(ENABLE_FULL_DEBUG_SYMBOLS)
-ifeq ($(ENABLE_FULL_DEBUG_SYMBOLS),1)
- ZIP_DEBUGINFO_FILES ?= 1
-else
- ZIP_DEBUGINFO_FILES=0
-endif
-MAKE_ARGS += ZIP_DEBUGINFO_FILES=$(ZIP_DEBUGINFO_FILES)
MAKE_ARGS += RM="$(RM)"
MAKE_ARGS += ZIPEXE=$(ZIPEXE)
--- a/hotspot/src/cpu/sparc/vm/sharedRuntime_sparc.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/cpu/sparc/vm/sharedRuntime_sparc.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -2322,7 +2322,7 @@
// Pre-load a static method's oop into O1. Used both by locking code and
// the normal JNI call code.
if (method->is_static() && !is_critical_native) {
- __ set_oop_constant(JNIHandles::make_local(Klass::cast(method->method_holder())->java_mirror()), O1);
+ __ set_oop_constant(JNIHandles::make_local(method->method_holder()->java_mirror()), O1);
// Now handlize the static class mirror in O1. It's known not-null.
__ st_ptr(O1, SP, klass_offset + STACK_BIAS);
--- a/hotspot/src/cpu/x86/vm/sharedRuntime_x86_32.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/cpu/x86/vm/sharedRuntime_x86_32.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -1936,7 +1936,7 @@
if (method->is_static() && !is_critical_native) {
// load opp into a register
- __ movoop(oop_handle_reg, JNIHandles::make_local(Klass::cast(method->method_holder())->java_mirror()));
+ __ movoop(oop_handle_reg, JNIHandles::make_local(method->method_holder()->java_mirror()));
// Now handlize the static class mirror it's known not-null.
__ movptr(Address(rsp, klass_offset), oop_handle_reg);
--- a/hotspot/src/cpu/x86/vm/sharedRuntime_x86_64.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/cpu/x86/vm/sharedRuntime_x86_64.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -2179,7 +2179,7 @@
if (method->is_static() && !is_critical_native) {
// load oop into a register
- __ movoop(oop_handle_reg, JNIHandles::make_local(Klass::cast(method->method_holder())->java_mirror()));
+ __ movoop(oop_handle_reg, JNIHandles::make_local(method->method_holder()->java_mirror()));
// Now handlize the static class mirror it's known not-null.
__ movptr(Address(rsp, klass_offset), oop_handle_reg);
--- a/hotspot/src/cpu/x86/vm/vm_version_x86.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/cpu/x86/vm/vm_version_x86.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -488,8 +488,8 @@
}
// The AES intrinsic stubs require AES instruction support (of course)
- // but also require AVX mode for misaligned SSE access
- if (UseAES && (UseAVX > 0)) {
+ // but also require AVX and SSE3 modes for the instructions they use.
+ if (UseAES && (UseAVX > 0) && (UseSSE > 2)) {
if (FLAG_IS_DEFAULT(UseAESIntrinsics)) {
UseAESIntrinsics = true;
}
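The new guard is three independent CPU-feature conditions; UseSSE is HotSpot's numeric SSE level flag, so UseSSE > 2 means SSE3 or later is enabled. A standalone restatement of the same predicate (a sketch, not VM code; the flag values below are hypothetical):

    #include <cstdio>

    // Parameters mirror the VM flags UseAES, UseAVX and UseSSE (a numeric SSE level).
    static bool aes_intrinsics_allowed(bool use_aes, int use_avx, int use_sse) {
      // use_sse >= 3 means the SSE3 instructions the stubs rely on are available.
      return use_aes && use_avx > 0 && use_sse > 2;
    }

    int main() {
      std::printf("%d\n", aes_intrinsics_allowed(true, 1, 2));  // 0: SSE3 missing
      std::printf("%d\n", aes_intrinsics_allowed(true, 1, 3));  // 1: all present
      return 0;
    }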
--- a/hotspot/src/share/tools/hsdis/hsdis-demo.c Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/tools/hsdis/hsdis-demo.c Fri Nov 09 08:20:03 2012 -0800
@@ -85,9 +85,11 @@
#include "dlfcn.h"
-#define DECODE_INSTRUCTIONS_NAME "decode_instructions_virtual"
+#define DECODE_INSTRUCTIONS_VIRTUAL_NAME "decode_instructions_virtual"
+#define DECODE_INSTRUCTIONS_NAME "decode_instructions"
#define HSDIS_NAME "hsdis"
static void* decode_instructions_pv = 0;
+static void* decode_instructions_sv = 0;
static const char* hsdis_path[] = {
HSDIS_NAME"-"LIBARCH LIB_EXT,
"./" HSDIS_NAME"-"LIBARCH LIB_EXT,
@@ -101,11 +103,12 @@
void* dllib = NULL;
const char* *next_in_path = hsdis_path;
while (1) {
- decode_instructions_pv = dlsym(dllib, DECODE_INSTRUCTIONS_NAME);
- if (decode_instructions_pv != NULL)
+ decode_instructions_pv = dlsym(dllib, DECODE_INSTRUCTIONS_VIRTUAL_NAME);
+ decode_instructions_sv = dlsym(dllib, DECODE_INSTRUCTIONS_NAME);
+ if (decode_instructions_pv != NULL || decode_instructions_sv != NULL)
return NULL;
if (dllib != NULL)
- return "plugin does not defined "DECODE_INSTRUCTIONS_NAME;
+ return "plugin does not defined "DECODE_INSTRUCTIONS_VIRTUAL_NAME" and "DECODE_INSTRUCTIONS_NAME;
for (dllib = NULL; dllib == NULL; ) {
const char* next_lib = (*next_in_path++);
if (next_lib == NULL)
@@ -213,20 +216,44 @@
printf("%s: %s\n", err, dlerror());
exit(1);
}
- printf("Decoding from %p to %p...\n", from, to);
- decode_instructions_ftype decode_instructions
- = (decode_instructions_ftype) decode_instructions_pv;
+ decode_func_vtype decode_instructions_v
+ = (decode_func_vtype) decode_instructions_pv;
+ decode_func_stype decode_instructions_s
+ = (decode_func_stype) decode_instructions_sv;
void* res;
- if (raw && xml) {
- res = (*decode_instructions)(from, to, (unsigned char*)from, to - from, simple_handle_event, stdout, NULL, stdout, options);
- } else if (raw) {
- res = (*decode_instructions)(from, to, (unsigned char*)from, to - from, simple_handle_event, stdout, NULL, stdout, options);
- } else {
- res = (*decode_instructions)(from, to, (unsigned char*)from, to - from,
- handle_event, (void*) event_cookie,
- fprintf_callback, stdout,
- options);
+ if (decode_instructions_pv != NULL) {
+ printf("\nDecoding from %p to %p...with %s\n", from, to, DECODE_INSTRUCTIONS_VIRTUAL_NAME);
+ if (raw) {
+ res = (*decode_instructions_v)(from, to,
+ (unsigned char*)from, to - from,
+ simple_handle_event, stdout,
+ NULL, stdout,
+ options, 0);
+ } else {
+ res = (*decode_instructions_v)(from, to,
+ (unsigned char*)from, to - from,
+ handle_event, (void*) event_cookie,
+ fprintf_callback, stdout,
+ options, 0);
+ }
+ if (res != (void*)to)
+ printf("*** Result was %p!\n", res);
}
- if (res != (void*)to)
- printf("*** Result was %p!\n", res);
+ void* sres;
+ if (decode_instructions_sv != NULL) {
+ printf("\nDecoding from %p to %p...with old decode_instructions\n", from, to, DECODE_INSTRUCTIONS_NAME);
+ if (raw) {
+ sres = (*decode_instructions_s)(from, to,
+ simple_handle_event, stdout,
+ NULL, stdout,
+ options);
+ } else {
+ sres = (*decode_instructions_s)(from, to,
+ handle_event, (void*) event_cookie,
+ fprintf_callback, stdout,
+ options);
+ }
+ if (sres != (void *)to)
+ printf("*** Result of decode_instructions %p!\n", sres);
+ }
}
--- a/hotspot/src/share/tools/hsdis/hsdis.c Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/tools/hsdis/hsdis.c Fri Nov 09 08:20:03 2012 -0800
@@ -99,7 +99,7 @@
unsigned char* buffer, uintptr_t length,
event_callback_t event_callback_arg, void* event_stream_arg,
printf_callback_t printf_callback_arg, void* printf_stream_arg,
- const char* options) {
+ const char* options, int newline) {
struct hsdis_app_data app_data;
memset(&app_data, 0, sizeof(app_data));
app_data.start_va = start_va;
@@ -110,7 +110,7 @@
app_data.event_stream = event_stream_arg;
app_data.printf_callback = printf_callback_arg;
app_data.printf_stream = printf_stream_arg;
- app_data.do_newline = false;
+ app_data.do_newline = newline == 0 ? false : true;
return decode(&app_data, options);
}
@@ -132,7 +132,7 @@
event_stream_arg,
printf_callback_arg,
printf_stream_arg,
- options);
+ options, false);
}
static void* decode(struct hsdis_app_data* app_data, const char* options) {
@@ -173,7 +173,7 @@
if (!app_data->losing) {
const char* insn_close = format_insn_close("/insn", &app_data->dinfo,
buf, sizeof(buf));
- (*event_callback)(event_stream, insn_close, (void*) p) != NULL;
+ (*event_callback)(event_stream, insn_close, (void*) p);
if (app_data->do_newline) {
/* follow each complete insn by a nice newline */
@@ -182,13 +182,14 @@
}
}
- (*event_callback)(event_stream, "/insns", (void*) p);
+ if (app_data->losing) (*event_callback)(event_stream, "/insns", (void*) p);
return (void*) p;
}
}
/* take the address of the function, for luck, and also test the typedef: */
-const decode_instructions_ftype decode_instructions_address = &decode_instructions_virtual;
+const decode_func_vtype decode_func_virtual_address = &decode_instructions_virtual;
+const decode_func_stype decode_func_address = &decode_instructions;
static const char* format_insn_close(const char* close,
disassemble_info* dinfo,
--- a/hotspot/src/share/tools/hsdis/hsdis.h Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/tools/hsdis/hsdis.h Fri Nov 09 08:20:03 2012 -0800
@@ -47,6 +47,9 @@
where tag is a simple identifier, signifying (as in XML) a element start,
element end, and standalone element. (To render as XML, add angle brackets.)
*/
+#ifndef SHARED_TOOLS_HSDIS_H
+#define SHARED_TOOLS_HSDIS_H
+
extern
#ifdef DLL_EXPORT
DLL_EXPORT
@@ -57,16 +60,37 @@
void* event_stream,
int (*printf_callback)(void*, const char*, ...),
void* printf_stream,
- const char* options);
+ const char* options,
+ int newline /* bool value for nice new line */);
+
+/* This is the compatibility interface for older versions of hotspot */
+extern
+#ifdef DLL_ENTRY
+ DLL_ENTRY
+#endif
+void* decode_instructions(void* start_pv, void* end_pv,
+ void* (*event_callback)(void*, const char*, void*),
+ void* event_stream,
+ int (*printf_callback)(void*, const char*, ...),
+ void* printf_stream,
+ const char* options);
/* convenience typedefs */
typedef void* (*decode_instructions_event_callback_ftype) (void*, const char*, void*);
typedef int (*decode_instructions_printf_callback_ftype) (void*, const char*, ...);
-typedef void* (*decode_instructions_ftype) (uintptr_t start_va, uintptr_t end_va,
- unsigned char* buffer, uintptr_t length,
- decode_instructions_event_callback_ftype event_callback,
- void* event_stream,
- decode_instructions_printf_callback_ftype printf_callback,
- void* printf_stream,
- const char* options);
+typedef void* (*decode_func_vtype) (uintptr_t start_va, uintptr_t end_va,
+ unsigned char* buffer, uintptr_t length,
+ decode_instructions_event_callback_ftype event_callback,
+ void* event_stream,
+ decode_instructions_printf_callback_ftype printf_callback,
+ void* printf_stream,
+ const char* options,
+ int newline);
+typedef void* (*decode_func_stype) (void* start_pv, void* end_pv,
+ decode_instructions_event_callback_ftype event_callback,
+ void* event_stream,
+ decode_instructions_printf_callback_ftype printf_callback,
+ void* printf_stream,
+ const char* options);
+#endif /* SHARED_TOOLS_HSDIS_H */
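With both entry points now declared in the header, a plugin client is expected to probe for the new decode_instructions_virtual first and fall back to the older decode_instructions, as hsdis-demo.c does above. A minimal standalone sketch of such a client follows; the library name hsdis-amd64.so, the NOP byte buffer, and the stream-printing callback are illustrative assumptions, not part of this change:

    #include <dlfcn.h>
    #include <cstdarg>
    #include <cstdint>
    #include <cstdio>

    typedef void* (*event_fn)(void*, const char*, void*);
    typedef int   (*printf_fn)(void*, const char*, ...);
    // Mirror decode_func_vtype / decode_func_stype from hsdis.h.
    typedef void* (*virtual_fn)(uintptr_t, uintptr_t, unsigned char*, uintptr_t,
                                event_fn, void*, printf_fn, void*,
                                const char*, int);
    typedef void* (*simple_fn)(void*, void*, event_fn, void*, printf_fn, void*,
                               const char*);

    static void* ignore_event(void*, const char*, void*) { return NULL; }

    static int print_to_stream(void* stream, const char* fmt, ...) {
      va_list ap;
      va_start(ap, fmt);
      int rc = std::vfprintf((FILE*) stream, fmt, ap);
      va_end(ap);
      return rc;
    }

    int main() {
      void* lib = dlopen("./hsdis-amd64.so", RTLD_NOW);   // placeholder path
      if (lib == NULL) { std::fprintf(stderr, "%s\n", dlerror()); return 1; }

      virtual_fn decode_v = (virtual_fn) dlsym(lib, "decode_instructions_virtual");
      simple_fn  decode_s = (simple_fn)  dlsym(lib, "decode_instructions");

      unsigned char code[] = { 0x90, 0x90, 0xc3 };        // nop; nop; ret
      if (decode_v != NULL) {
        // New interface: virtual address range, separate buffer, and a trailing
        // flag asking for a newline after each instruction.
        decode_v((uintptr_t) code, (uintptr_t) code + sizeof(code),
                 code, sizeof(code),
                 ignore_event, NULL, print_to_stream, stdout, "", 1);
      } else if (decode_s != NULL) {
        // Compatibility interface: plain begin/end pointers only.
        decode_s(code, code + sizeof(code),
                 ignore_event, NULL, print_to_stream, stdout, "");
      }
      dlclose(lib);
      return 0;
    }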
--- a/hotspot/src/share/vm/ci/ciEnv.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/ci/ciEnv.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -768,8 +768,8 @@
Method* m = lookup_method(accessor->get_instanceKlass(), lookup, name_sym, sig_sym, bc);
if (m != NULL &&
(bc == Bytecodes::_invokestatic
- ? InstanceKlass::cast(m->method_holder())->is_not_initialized()
- : !InstanceKlass::cast(m->method_holder())->is_loaded())) {
+ ? m->method_holder()->is_not_initialized()
+ : !m->method_holder()->is_loaded())) {
m = NULL;
}
if (m != NULL) {
@@ -1056,7 +1056,7 @@
method_name,
entry_bci);
}
- InstanceKlass::cast(method->method_holder())->add_osr_nmethod(nm);
+ method->method_holder()->add_osr_nmethod(nm);
}
}
--- a/hotspot/src/share/vm/ci/ciMethod.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/ci/ciMethod.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -105,7 +105,7 @@
CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
}
- if (InstanceKlass::cast(h_m()->method_holder())->is_linked()) {
+ if (h_m()->method_holder()->is_linked()) {
_can_be_statically_bound = h_m()->can_be_statically_bound();
} else {
// Have to use a conservative value in this case.
@@ -188,7 +188,7 @@
// Revert any breakpoint bytecodes in ci's copy
if (me->number_of_breakpoints() > 0) {
- BreakpointInfo* bp = InstanceKlass::cast(me->method_holder())->breakpoints();
+ BreakpointInfo* bp = me->method_holder()->breakpoints();
for (; bp != NULL; bp = bp->next()) {
if (bp->match(me)) {
code_at_put(bp->bci(), bp->orig_bytecode());
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/classfile/bytecodeAssembler.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -0,0 +1,269 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#include "precompiled.hpp"
+
+#include "classfile/bytecodeAssembler.hpp"
+#include "interpreter/bytecodes.hpp"
+#include "memory/oopFactory.hpp"
+#include "oops/constantPool.hpp"
+
+#ifdef TARGET_ARCH_x86
+# include "bytes_x86.hpp"
+#endif
+#ifdef TARGET_ARCH_sparc
+# include "bytes_sparc.hpp"
+#endif
+#ifdef TARGET_ARCH_zero
+# include "bytes_zero.hpp"
+#endif
+#ifdef TARGET_ARCH_arm
+# include "bytes_arm.hpp"
+#endif
+#ifdef TARGET_ARCH_ppc
+# include "bytes_ppc.hpp"
+#endif
+
+u2 BytecodeConstantPool::find_or_add(BytecodeCPEntry const& bcpe) {
+ u2 index;
+ u2* probe = _indices.get(bcpe);
+ if (probe == NULL) {
+ index = _entries.length();
+ _entries.append(bcpe);
+ _indices.put(bcpe, index);
+ } else {
+ index = *probe;
+ }
+ return index + _orig->length();
+}
+
+ConstantPool* BytecodeConstantPool::create_constant_pool(TRAPS) const {
+ if (_entries.length() == 0) {
+ return _orig;
+ }
+
+ ConstantPool* cp = ConstantPool::allocate(
+ _orig->pool_holder()->class_loader_data(),
+ _orig->length() + _entries.length(), CHECK_NULL);
+
+ cp->set_pool_holder(_orig->pool_holder());
+ _orig->copy_cp_to(1, _orig->length() - 1, cp, 1, CHECK_NULL);
+
+ for (int i = 0; i < _entries.length(); ++i) {
+ BytecodeCPEntry entry = _entries.at(i);
+ int idx = i + _orig->length();
+ switch (entry._tag) {
+ case BytecodeCPEntry::UTF8:
+ cp->symbol_at_put(idx, entry._u.utf8);
+ entry._u.utf8->increment_refcount();
+ break;
+ case BytecodeCPEntry::KLASS:
+ cp->unresolved_klass_at_put(
+ idx, cp->symbol_at(entry._u.klass));
+ break;
+ case BytecodeCPEntry::STRING:
+ cp->unresolved_string_at_put(
+ idx, cp->symbol_at(entry._u.string));
+ break;
+ case BytecodeCPEntry::NAME_AND_TYPE:
+ cp->name_and_type_at_put(idx,
+ entry._u.name_and_type.name_index,
+ entry._u.name_and_type.type_index);
+ break;
+ case BytecodeCPEntry::METHODREF:
+ cp->method_at_put(idx,
+ entry._u.methodref.class_index,
+ entry._u.methodref.name_and_type_index);
+ break;
+ default:
+ ShouldNotReachHere();
+ }
+ }
+ return cp;
+}
+
+void BytecodeAssembler::append(u1 imm_u1) {
+ _code->append(imm_u1);
+}
+
+void BytecodeAssembler::append(u2 imm_u2) {
+ _code->append(0);
+ _code->append(0);
+ Bytes::put_Java_u2(_code->adr_at(_code->length() - 2), imm_u2);
+}
+
+void BytecodeAssembler::append(u4 imm_u4) {
+ _code->append(0);
+ _code->append(0);
+ _code->append(0);
+ _code->append(0);
+ Bytes::put_Java_u4(_code->adr_at(_code->length() - 4), imm_u4);
+}
+
+void BytecodeAssembler::xload(u4 index, u1 onebyteop, u1 twobyteop) {
+ if (index < 4) {
+ _code->append(onebyteop + index);
+ } else {
+ _code->append(twobyteop);
+ _code->append((u2)index);
+ }
+}
+
+void BytecodeAssembler::dup() {
+ _code->append(Bytecodes::_dup);
+}
+
+void BytecodeAssembler::_new(Symbol* sym) {
+ u2 cpool_index = _cp->klass(sym);
+ _code->append(Bytecodes::_new);
+ append(cpool_index);
+}
+
+void BytecodeAssembler::load_string(Symbol* sym) {
+ u2 cpool_index = _cp->string(sym);
+ if (cpool_index < 0x100) {
+ ldc(cpool_index);
+ } else {
+ ldc_w(cpool_index);
+ }
+}
+
+void BytecodeAssembler::ldc(u1 index) {
+ _code->append(Bytecodes::_ldc);
+ append(index);
+}
+
+void BytecodeAssembler::ldc_w(u2 index) {
+ _code->append(Bytecodes::_ldc_w);
+ append(index);
+}
+
+void BytecodeAssembler::athrow() {
+ _code->append(Bytecodes::_athrow);
+}
+
+void BytecodeAssembler::iload(u4 index) {
+ xload(index, Bytecodes::_iload_0, Bytecodes::_iload);
+}
+
+void BytecodeAssembler::lload(u4 index) {
+ xload(index, Bytecodes::_lload_0, Bytecodes::_lload);
+}
+
+void BytecodeAssembler::fload(u4 index) {
+ xload(index, Bytecodes::_fload_0, Bytecodes::_fload);
+}
+
+void BytecodeAssembler::dload(u4 index) {
+ xload(index, Bytecodes::_dload_0, Bytecodes::_dload);
+}
+
+void BytecodeAssembler::aload(u4 index) {
+ xload(index, Bytecodes::_aload_0, Bytecodes::_aload);
+}
+
+void BytecodeAssembler::load(BasicType bt, u4 index) {
+ switch (bt) {
+ case T_BOOLEAN:
+ case T_CHAR:
+ case T_BYTE:
+ case T_SHORT:
+ case T_INT: iload(index); break;
+ case T_FLOAT: fload(index); break;
+ case T_DOUBLE: dload(index); break;
+ case T_LONG: lload(index); break;
+ case T_OBJECT:
+ case T_ARRAY: aload(index); break;
+ default:
+ ShouldNotReachHere();
+ }
+}
+
+void BytecodeAssembler::checkcast(Symbol* sym) {
+ u2 cpool_index = _cp->klass(sym);
+ _code->append(Bytecodes::_checkcast);
+ append(cpool_index);
+}
+
+void BytecodeAssembler::invokespecial(Method* method) {
+ invokespecial(method->klass_name(), method->name(), method->signature());
+}
+
+void BytecodeAssembler::invokespecial(Symbol* klss, Symbol* name, Symbol* sig) {
+ u2 methodref_index = _cp->methodref(klss, name, sig);
+ _code->append(Bytecodes::_invokespecial);
+ append(methodref_index);
+}
+
+void BytecodeAssembler::invokevirtual(Method* method) {
+ invokevirtual(method->klass_name(), method->name(), method->signature());
+}
+
+void BytecodeAssembler::invokevirtual(Symbol* klss, Symbol* name, Symbol* sig) {
+ u2 methodref_index = _cp->methodref(klss, name, sig);
+ _code->append(Bytecodes::_invokevirtual);
+ append(methodref_index);
+}
+
+void BytecodeAssembler::ireturn() {
+ _code->append(Bytecodes::_ireturn);
+}
+
+void BytecodeAssembler::lreturn() {
+ _code->append(Bytecodes::_lreturn);
+}
+
+void BytecodeAssembler::freturn() {
+ _code->append(Bytecodes::_freturn);
+}
+
+void BytecodeAssembler::dreturn() {
+ _code->append(Bytecodes::_dreturn);
+}
+
+void BytecodeAssembler::areturn() {
+ _code->append(Bytecodes::_areturn);
+}
+
+void BytecodeAssembler::_return() {
+ _code->append(Bytecodes::_return);
+}
+
+void BytecodeAssembler::_return(BasicType bt) {
+ switch (bt) {
+ case T_BOOLEAN:
+ case T_CHAR:
+ case T_BYTE:
+ case T_SHORT:
+ case T_INT: ireturn(); break;
+ case T_FLOAT: freturn(); break;
+ case T_DOUBLE: dreturn(); break;
+ case T_LONG: lreturn(); break;
+ case T_OBJECT:
+ case T_ARRAY: areturn(); break;
+ case T_VOID: _return(); break;
+ default:
+ ShouldNotReachHere();
+ }
+}
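The append(u2)/append(u4) helpers above grow the buffer with zero bytes and then patch the operand in big-endian ("Java") order via Bytes::put_Java_u2/u4, since bytecode operands are stored in class-file byte order regardless of the host. A small standalone sketch of that encoding, with a plain byte vector standing in for BytecodeBuffer (an assumption for illustration only):

    #include <cstdio>
    #include <vector>

    // Appends a two-byte operand in class-file (big-endian) order, the same
    // effect as BytecodeAssembler::append(u2) plus Bytes::put_Java_u2 above.
    static void append_u2(std::vector<unsigned char>& code, unsigned short v) {
      code.push_back((unsigned char)(v >> 8));    // high byte first
      code.push_back((unsigned char)(v & 0xff));  // then low byte
    }

    int main() {
      std::vector<unsigned char> code;
      code.push_back(0xbb);          // the 'new' opcode
      append_u2(code, 0x0102);       // followed by a constant pool index
      std::printf("%02x %02x %02x\n", code[0], code[1], code[2]); // bb 01 02
      return 0;
    }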
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/classfile/bytecodeAssembler.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -0,0 +1,214 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_VM_CLASSFILE_BYTECODEASSEMBLER_HPP
+#define SHARE_VM_CLASSFILE_BYTECODEASSEMBLER_HPP
+
+#include "memory/allocation.hpp"
+#include "oops/method.hpp"
+#include "oops/symbol.hpp"
+#include "utilities/globalDefinitions.hpp"
+#include "utilities/growableArray.hpp"
+#include "utilities/resourceHash.hpp"
+
+
+/**
+ * Bytecode Assembler
+ *
+ * These classes are used to synthesize code for creating new methods from
+ * within the VM. This is only a partial implementation of an assembler;
+ * only the bytecodes that are needed by clients are implemented at this time.
+ * This is used during default method analysis to create overpass methods
+ * and add them to a class during parsing. Other uses (such as creating
+ * bridges) may come later. Any missing bytecodes can be implemented on an
+ * as-needed basis.
+ */
+
+class BytecodeBuffer : public GrowableArray<u1> {
+ public:
+ BytecodeBuffer() : GrowableArray<u1>(20) {}
+};
+
+// Entries in a yet-to-be-created constant pool. Limited types for now.
+class BytecodeCPEntry VALUE_OBJ_CLASS_SPEC {
+ public:
+ enum tag {
+ ERROR_TAG,
+ UTF8,
+ KLASS,
+ STRING,
+ NAME_AND_TYPE,
+ METHODREF
+ };
+
+ u1 _tag;
+ union {
+ Symbol* utf8;
+ u2 klass;
+ u2 string;
+ struct {
+ u2 name_index;
+ u2 type_index;
+ } name_and_type;
+ struct {
+ u2 class_index;
+ u2 name_and_type_index;
+ } methodref;
+ uintptr_t hash;
+ } _u;
+
+ BytecodeCPEntry() : _tag(ERROR_TAG) { _u.hash = 0; }
+ BytecodeCPEntry(u1 tag) : _tag(tag) { _u.hash = 0; }
+
+ static BytecodeCPEntry utf8(Symbol* symbol) {
+ BytecodeCPEntry bcpe(UTF8);
+ bcpe._u.utf8 = symbol;
+ return bcpe;
+ }
+
+ static BytecodeCPEntry klass(u2 index) {
+ BytecodeCPEntry bcpe(KLASS);
+ bcpe._u.klass = index;
+ return bcpe;
+ }
+
+ static BytecodeCPEntry string(u2 index) {
+ BytecodeCPEntry bcpe(STRING);
+ bcpe._u.string = index;
+ return bcpe;
+ }
+
+ static BytecodeCPEntry name_and_type(u2 name, u2 type) {
+ BytecodeCPEntry bcpe(NAME_AND_TYPE);
+ bcpe._u.name_and_type.name_index = name;
+ bcpe._u.name_and_type.type_index = type;
+ return bcpe;
+ }
+
+ static BytecodeCPEntry methodref(u2 class_index, u2 nat) {
+ BytecodeCPEntry bcpe(METHODREF);
+ bcpe._u.methodref.class_index = class_index;
+ bcpe._u.methodref.name_and_type_index = nat;
+ return bcpe;
+ }
+
+ static bool equals(BytecodeCPEntry const& e0, BytecodeCPEntry const& e1) {
+ return e0._tag == e1._tag && e0._u.hash == e1._u.hash;
+ }
+
+ static unsigned hash(BytecodeCPEntry const& e0) {
+ return (unsigned)(e0._tag ^ e0._u.hash);
+ }
+};
+
+class BytecodeConstantPool : ResourceObj {
+ private:
+ typedef ResourceHashtable<BytecodeCPEntry, u2,
+ &BytecodeCPEntry::hash, &BytecodeCPEntry::equals> IndexHash;
+
+ ConstantPool* _orig;
+ GrowableArray<BytecodeCPEntry> _entries;
+ IndexHash _indices;
+
+ u2 find_or_add(BytecodeCPEntry const& bcpe);
+
+ public:
+
+ BytecodeConstantPool(ConstantPool* orig) : _orig(orig) {}
+
+ BytecodeCPEntry const& at(u2 index) const { return _entries.at(index); }
+
+ InstanceKlass* pool_holder() const {
+ return InstanceKlass::cast(_orig->pool_holder());
+ }
+
+ u2 utf8(Symbol* sym) {
+ return find_or_add(BytecodeCPEntry::utf8(sym));
+ }
+
+ u2 klass(Symbol* class_name) {
+ return find_or_add(BytecodeCPEntry::klass(utf8(class_name)));
+ }
+
+ u2 string(Symbol* str) {
+ return find_or_add(BytecodeCPEntry::string(utf8(str)));
+ }
+
+ u2 name_and_type(Symbol* name, Symbol* sig) {
+ return find_or_add(BytecodeCPEntry::name_and_type(utf8(name), utf8(sig)));
+ }
+
+ u2 methodref(Symbol* class_name, Symbol* name, Symbol* sig) {
+ return find_or_add(BytecodeCPEntry::methodref(
+ klass(class_name), name_and_type(name, sig)));
+ }
+
+ ConstantPool* create_constant_pool(TRAPS) const;
+};
+
+// Partial bytecode assembler - only what we need for creating
+// overpass methods for default methods is implemented
+class BytecodeAssembler : StackObj {
+ private:
+ BytecodeBuffer* _code;
+ BytecodeConstantPool* _cp;
+
+ void append(u1 imm_u1);
+ void append(u2 imm_u2);
+ void append(u4 imm_u4);
+
+ void xload(u4 index, u1 quick, u1 twobyte);
+
+ public:
+ BytecodeAssembler(BytecodeBuffer* buffer, BytecodeConstantPool* cp)
+ : _code(buffer), _cp(cp) {}
+
+ void aload(u4 index);
+ void areturn();
+ void athrow();
+ void checkcast(Symbol* sym);
+ void dload(u4 index);
+ void dreturn();
+ void dup();
+ void fload(u4 index);
+ void freturn();
+ void iload(u4 index);
+ void invokespecial(Method* method);
+ void invokespecial(Symbol* cls, Symbol* name, Symbol* sig);
+ void invokevirtual(Method* method);
+ void invokevirtual(Symbol* cls, Symbol* name, Symbol* sig);
+ void ireturn();
+ void ldc(u1 index);
+ void ldc_w(u2 index);
+ void lload(u4 index);
+ void lreturn();
+ void _new(Symbol* sym);
+ void _return();
+
+ void load_string(Symbol* sym);
+ void load(BasicType bt, u4 index);
+ void _return(BasicType bt);
+};
+
+#endif // SHARE_VM_CLASSFILE_BYTECODEASSEMBLER_HPP
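A hedged usage sketch of the interface above: assembling a method body that just constructs and throws an error, which is roughly what default-method overpass generation needs. Only the BytecodeAssembler/BytecodeConstantPool calls come from this header; the error-class symbol, the message symbol, and the vmSymbols constructor name/signature entries are assumptions about the surrounding VM code:

    #include "classfile/bytecodeAssembler.hpp"
    #include "classfile/vmSymbols.hpp"

    // Emits "new <error_name>(<message>); athrow" into 'buffer', adding any
    // constant pool entries it needs to 'bpool' (merged later with
    // bpool->create_constant_pool(THREAD)). Needs a max_stack of 3.
    static void assemble_throw_body(BytecodeBuffer* buffer,
                                    BytecodeConstantPool* bpool,
                                    Symbol* error_name, Symbol* message) {
      BytecodeAssembler assem(buffer, bpool);
      assem._new(error_name);       // new <error_name>, leaves the objectref
      assem.dup();                  // one copy for <init>, one for athrow
      assem.load_string(message);   // ldc / ldc_w of the message string
      assem.invokespecial(error_name, vmSymbols::object_initializer_name(),
                          vmSymbols::string_void_signature());
      assem.athrow();
    }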
--- a/hotspot/src/share/vm/classfile/classFileParser.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/classfile/classFileParser.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -27,6 +27,8 @@
#include "classfile/classLoader.hpp"
#include "classfile/classLoaderData.hpp"
#include "classfile/classLoaderData.inline.hpp"
+#include "classfile/defaultMethods.hpp"
+#include "classfile/genericSignatures.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
@@ -84,6 +86,9 @@
// - to check NameAndType_info signatures more aggressively
#define JAVA_7_VERSION 51
+// Extension method support.
+#define JAVA_8_VERSION 52
+
void ClassFileParser::parse_constant_pool_entries(ClassLoaderData* loader_data, constantPoolHandle cp, int length, TRAPS) {
// Use a local copy of ClassFileStream. It helps the C++ compiler to optimize
@@ -785,6 +790,7 @@
ClassLoaderData* loader_data,
Handle protection_domain,
Symbol* class_name,
+ bool* has_default_methods,
TRAPS) {
ClassFileStream* cfs = stream();
assert(length > 0, "only called for length>0");
@@ -821,6 +827,9 @@
if (!Klass::cast(interf())->is_interface()) {
THROW_MSG_(vmSymbols::java_lang_IncompatibleClassChangeError(), "Implementing class", NULL);
}
+ if (InstanceKlass::cast(interf())->has_default_methods()) {
+ *has_default_methods = true;
+ }
interfaces->at_put(index, interf());
}
@@ -1928,7 +1937,8 @@
if (method_attribute_name == vmSymbols::tag_code()) {
// Parse Code attribute
if (_need_verify) {
- guarantee_property(!access_flags.is_native() && !access_flags.is_abstract(),
+ guarantee_property(
+ !access_flags.is_native() && !access_flags.is_abstract(),
"Code attribute in native or abstract methods in class file %s",
CHECK_(nullHandle));
}
@@ -2125,7 +2135,9 @@
runtime_visible_annotations_length = method_attribute_length;
runtime_visible_annotations = cfs->get_u1_buffer();
assert(runtime_visible_annotations != NULL, "null visible annotations");
- parse_annotations(runtime_visible_annotations, runtime_visible_annotations_length, cp, &parsed_annotations, CHECK_(nullHandle));
+ parse_annotations(runtime_visible_annotations,
+ runtime_visible_annotations_length, cp, &parsed_annotations,
+ CHECK_(nullHandle));
cfs->skip_u1(runtime_visible_annotations_length, CHECK_(nullHandle));
} else if (PreserveAllAnnotations && method_attribute_name == vmSymbols::tag_runtime_invisible_annotations()) {
runtime_invisible_annotations_length = method_attribute_length;
@@ -2169,12 +2181,10 @@
}
// All sizing information for a Method* is finally available, now create it
- Method* m = Method::allocate(loader_data, code_length, access_flags,
- linenumber_table_length,
- total_lvt_length,
- exception_table_length,
- checked_exceptions_length,
- CHECK_(nullHandle));
+ Method* m = Method::allocate(
+ loader_data, code_length, access_flags, linenumber_table_length,
+ total_lvt_length, exception_table_length, checked_exceptions_length,
+ ConstMethod::NORMAL, CHECK_(nullHandle));
ClassLoadingService::add_class_method_size(m->size()*HeapWordSize);
@@ -2204,7 +2214,6 @@
// Fill in code attribute information
m->set_max_stack(max_stack);
m->set_max_locals(max_locals);
-
m->constMethod()->set_stackmap_data(stackmap_data);
// Copy byte codes
@@ -2356,6 +2365,7 @@
Array<AnnotationArray*>** methods_annotations,
Array<AnnotationArray*>** methods_parameter_annotations,
Array<AnnotationArray*>** methods_default_annotations,
+ bool* has_default_methods,
TRAPS) {
ClassFileStream* cfs = stream();
AnnotationArray* method_annotations = NULL;
@@ -2382,6 +2392,10 @@
if (method->is_final()) {
*has_final_method = true;
}
+ if (is_interface && !method->is_abstract() && !method->is_static()) {
+ // default method
+ *has_default_methods = true;
+ }
methods->at_put(index, method());
if (*methods_annotations == NULL) {
*methods_annotations =
@@ -2907,6 +2921,34 @@
}
+#ifndef PRODUCT
+static void parseAndPrintGenericSignatures(
+ instanceKlassHandle this_klass, TRAPS) {
+ assert(ParseAllGenericSignatures == true, "Shouldn't call otherwise");
+ ResourceMark rm;
+
+ if (this_klass->generic_signature() != NULL) {
+ using namespace generic;
+ ClassDescriptor* spec = ClassDescriptor::parse_generic_signature(this_klass(), CHECK);
+
+ tty->print_cr("Parsing %s", this_klass->generic_signature()->as_C_string());
+ spec->print_on(tty);
+
+ for (int i = 0; i < this_klass->methods()->length(); ++i) {
+ Method* m = this_klass->methods()->at(i);
+ MethodDescriptor* method_spec = MethodDescriptor::parse_generic_signature(m, spec);
+ Symbol* sig = m->generic_signature();
+ if (sig == NULL) {
+ sig = m->signature();
+ }
+ tty->print_cr("Parsing %s", sig->as_C_string());
+ method_spec->print_on(tty);
+ }
+ }
+}
+#endif // ndef PRODUCT
+
+
instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
Handle class_loader,
Handle protection_domain,
@@ -2923,6 +2965,8 @@
unsigned char *cached_class_file_bytes = NULL;
jint cached_class_file_length;
ClassLoaderData* loader_data = ClassLoaderData::class_loader_data(class_loader());
+ bool has_default_methods = false;
+ ResourceMark rm(THREAD);
ClassFileStream* cfs = stream();
// Timing
@@ -3138,7 +3182,9 @@
if (itfs_len == 0) {
local_interfaces = Universe::the_empty_klass_array();
} else {
- local_interfaces = parse_interfaces(cp, itfs_len, loader_data, protection_domain, _class_name, CHECK_(nullHandle));
+ local_interfaces = parse_interfaces(
+ cp, itfs_len, loader_data, protection_domain, _class_name,
+ &has_default_methods, CHECK_(nullHandle));
}
u2 java_fields_count = 0;
@@ -3164,6 +3210,7 @@
&methods_annotations,
&methods_parameter_annotations,
&methods_default_annotations,
+ &has_default_methods,
CHECK_(nullHandle));
// Additional attributes
@@ -3193,6 +3240,11 @@
super_klass = instanceKlassHandle(THREAD, kh());
}
if (super_klass.not_null()) {
+
+ if (super_klass->has_default_methods()) {
+ has_default_methods = true;
+ }
+
if (super_klass->is_interface()) {
ResourceMark rm(THREAD);
Exceptions::fthrow(
@@ -3229,14 +3281,11 @@
int itable_size = 0;
int num_miranda_methods = 0;
- klassVtable::compute_vtable_size_and_num_mirandas(vtable_size,
- num_miranda_methods,
- super_klass(),
- methods,
- access_flags,
- class_loader,
- class_name,
- local_interfaces,
+ GrowableArray<Method*> all_mirandas(20);
+
+ klassVtable::compute_vtable_size_and_num_mirandas(
+ &vtable_size, &num_miranda_methods, &all_mirandas, super_klass(), methods,
+ access_flags, class_loader, class_name, local_interfaces,
CHECK_(nullHandle));
// Size of Java itable (in words)
@@ -3656,6 +3705,7 @@
this_klass->set_minor_version(minor_version);
this_klass->set_major_version(major_version);
+ this_klass->set_has_default_methods(has_default_methods);
// Set up Method*::intrinsic_id as soon as we know the names of methods.
// (We used to do this lazily, but now we query it in Rewriter,
@@ -3673,6 +3723,16 @@
cached_class_file_length);
}
+ // Fill in field values obtained by parse_classfile_attributes
+ if (parsed_annotations.has_any_annotations())
+ parsed_annotations.apply_to(this_klass);
+ // Create annotations
+ if (_annotations != NULL && this_klass->annotations() == NULL) {
+ Annotations* anno = Annotations::allocate(loader_data, CHECK_NULL);
+ this_klass->set_annotations(anno);
+ }
+ apply_parsed_class_attributes(this_klass);
+
// Miranda methods
if ((num_miranda_methods > 0) ||
// if this class introduced new miranda methods or
@@ -3682,18 +3742,6 @@
this_klass->set_has_miranda_methods(); // then set a flag
}
- // Fill in field values obtained by parse_classfile_attributes
- if (parsed_annotations.has_any_annotations()) {
- parsed_annotations.apply_to(this_klass);
- }
- // Create annotations
- if (_annotations != NULL && this_klass->annotations() == NULL) {
- Annotations* anno = Annotations::allocate(loader_data, CHECK_NULL);
- this_klass->set_annotations(anno);
- }
- apply_parsed_class_attributes(this_klass);
-
- // Compute transitive closure of interfaces this class implements
this_klass->set_transitive_interfaces(transitive_interfaces);
// Fill in information needed to compute superclasses.
@@ -3702,6 +3750,7 @@
// Initialize itable offset tables
klassItable::setup_itable_offset_table(this_klass);
+ // Compute transitive closure of interfaces this class implements
// Do final class setup
fill_oop_maps(this_klass, nonstatic_oop_map_count, nonstatic_oop_offsets, nonstatic_oop_counts);
@@ -3726,6 +3775,21 @@
check_illegal_static_method(this_klass, CHECK_(nullHandle));
}
+
+#ifdef ASSERT
+ if (ParseAllGenericSignatures) {
+ parseAndPrintGenericSignatures(this_klass, CHECK_(nullHandle));
+ }
+#endif
+
+ // Generate any default methods - default methods are interface methods
+ // that have a default implementation. This is new with the Lambda project.
+ if (has_default_methods && !access_flags.is_interface() &&
+ local_interfaces->length() > 0) {
+ DefaultMethods::generate_default_methods(
+ this_klass(), &all_mirandas, CHECK_(nullHandle));
+ }
+
// Allocate mirror and initialize static fields
java_lang_Class::create_mirror(this_klass, CHECK_(nullHandle));
@@ -3744,6 +3808,7 @@
false /* not shared class */);
if (TraceClassLoading) {
+ ResourceMark rm;
// print in a single call to reduce interleaving of output
if (cfs->source() != NULL) {
tty->print("[Loaded %s from %s]\n", this_klass->external_name(),
@@ -3758,13 +3823,13 @@
tty->print("[Loaded %s]\n", this_klass->external_name());
}
} else {
- ResourceMark rm;
tty->print("[Loaded %s from %s]\n", this_klass->external_name(),
InstanceKlass::cast(class_loader->klass())->external_name());
}
}
if (TraceClassResolution) {
+ ResourceMark rm;
// print out the superclass.
const char * from = Klass::cast(this_klass())->external_name();
if (this_klass->java_super() != NULL) {
@@ -3785,6 +3850,7 @@
#ifndef PRODUCT
if( PrintCompactFieldsSavings ) {
+ ResourceMark rm;
if( nonstatic_field_size < orig_nonstatic_field_size ) {
tty->print("[Saved %d of %d bytes in %s]\n",
(orig_nonstatic_field_size - nonstatic_field_size)*heapOopSize,
@@ -3811,7 +3877,6 @@
return this_klass;
}
-
unsigned int
ClassFileParser::compute_oop_map_count(instanceKlassHandle super,
unsigned int nonstatic_oop_map_count,
@@ -4128,7 +4193,7 @@
}
// continue to look from super_m's holder's super.
- k = InstanceKlass::cast(super_m->method_holder())->super();
+ k = super_m->method_holder()->super();
continue;
}
@@ -4263,13 +4328,16 @@
const bool is_strict = (flags & JVM_ACC_STRICT) != 0;
const bool is_synchronized = (flags & JVM_ACC_SYNCHRONIZED) != 0;
const bool major_gte_15 = _major_version >= JAVA_1_5_VERSION;
+ const bool major_gte_8 = _major_version >= JAVA_8_VERSION;
const bool is_initializer = (name == vmSymbols::object_initializer_name());
bool is_illegal = false;
if (is_interface) {
- if (!is_abstract || !is_public || is_static || is_final ||
- is_native || (major_gte_15 && (is_synchronized || is_strict))) {
+ if (!is_public || is_static || is_final || is_native ||
+ ((is_synchronized || is_strict) && major_gte_15 &&
+ (!major_gte_8 || is_abstract)) ||
+ (!major_gte_8 && !is_abstract)) {
is_illegal = true;
}
} else { // not interface
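The reworked interface-method flag check packs three class-file-version cases into one boolean. A standalone restatement of the same rule (a sketch, not VM code; parameter names mirror the locals above) may make those cases easier to read:

    // Same truth table as the is_illegal computation for interface methods above,
    // spelled out per class file version.
    static bool interface_method_flags_illegal(bool is_public, bool is_static,
                                               bool is_final, bool is_native,
                                               bool is_abstract,
                                               bool is_synchronized,
                                               bool is_strict,
                                               bool major_gte_15,
                                               bool major_gte_8) {
      if (!is_public || is_static || is_final || is_native) {
        return true;   // never legal on an interface method in this check
      }
      if (!major_gte_8 && !is_abstract) {
        return true;   // before version 52, interface methods must be abstract
      }
      if ((is_synchronized || is_strict) && major_gte_15) {
        // Versions 49..51: never legal; 52 and up: legal only on non-abstract
        // (default) method bodies.
        return !major_gte_8 || is_abstract;
      }
      return false;
    }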
--- a/hotspot/src/share/vm/classfile/classFileParser.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/classfile/classFileParser.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -151,6 +151,7 @@
ClassLoaderData* loader_data,
Handle protection_domain,
Symbol* class_name,
+ bool* has_default_methods,
TRAPS);
void record_defined_class_dependencies(instanceKlassHandle defined_klass, TRAPS);
@@ -188,6 +189,7 @@
Array<AnnotationArray*>** methods_annotations,
Array<AnnotationArray*>** methods_parameter_annotations,
Array<AnnotationArray*>** methods_default_annotations,
+ bool* has_default_methods,
TRAPS);
Array<int>* sort_methods(ClassLoaderData* loader_data,
Array<Method*>* methods,
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/classfile/defaultMethods.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -0,0 +1,1387 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#include "precompiled.hpp"
+#include "classfile/bytecodeAssembler.hpp"
+#include "classfile/defaultMethods.hpp"
+#include "classfile/genericSignatures.hpp"
+#include "classfile/symbolTable.hpp"
+#include "memory/allocation.hpp"
+#include "memory/metadataFactory.hpp"
+#include "memory/resourceArea.hpp"
+#include "runtime/signature.hpp"
+#include "runtime/thread.hpp"
+#include "oops/instanceKlass.hpp"
+#include "oops/klass.hpp"
+#include "oops/method.hpp"
+#include "utilities/accessFlags.hpp"
+#include "utilities/exceptions.hpp"
+#include "utilities/ostream.hpp"
+#include "utilities/pair.hpp"
+#include "utilities/resourceHash.hpp"
+
+typedef enum { QUALIFIED, DISQUALIFIED } QualifiedState;
+
+// Because we use an iterative algorithm when iterating over the type
+// hierarchy, we can't use traditional scoped objects which automatically do
+// cleanup in the destructor when the scope is exited. PseudoScope (and
+// PseudoScopeMark) provides a similar functionality, but for when you want a
+// scoped object in non-stack memory (such as in resource memory, as we do
+// here). You've just got to remember to call 'destroy()' on the scope when
+// leaving it (and marks have to be explicitly added).
+class PseudoScopeMark : public ResourceObj {
+ public:
+ virtual void destroy() = 0;
+};
+
+class PseudoScope : public ResourceObj {
+ private:
+ GrowableArray<PseudoScopeMark*> _marks;
+ public:
+
+ static PseudoScope* cast(void* data) {
+ return static_cast<PseudoScope*>(data);
+ }
+
+ void add_mark(PseudoScopeMark* psm) {
+ _marks.append(psm);
+ }
+
+ void destroy() {
+ for (int i = 0; i < _marks.length(); ++i) {
+ _marks.at(i)->destroy();
+ }
+ }
+};
+
+class ContextMark : public PseudoScopeMark {
+ private:
+ generic::Context::Mark _mark;
+ public:
+ ContextMark(const generic::Context::Mark& cm) : _mark(cm) {}
+ virtual void destroy() { _mark.destroy(); }
+};
+
+#ifndef PRODUCT
+static void print_slot(outputStream* str, Symbol* name, Symbol* signature) {
+ ResourceMark rm;
+ str->print("%s%s", name->as_C_string(), signature->as_C_string());
+}
+
+static void print_method(outputStream* str, Method* mo, bool with_class=true) {
+ ResourceMark rm;
+ if (with_class) {
+ str->print("%s.", mo->klass_name()->as_C_string());
+ }
+ print_slot(str, mo->name(), mo->signature());
+}
+#endif // ndef PRODUCT
+
+/**
+ * Perform a depth-first iteration over the class hierarchy, applying
+ * algorithmic logic as it goes.
+ *
+ * This class is one half of the inheritance hierarchy analysis mechanism.
+ * It is meant to be used in conjunction with another class, the algorithm,
+ * which is indicated by the ALGO template parameter. This class can be
+ * paired with any algorithm class that provides the required methods.
+ *
+ * This class contains all the mechanics for iterating over the class hierarchy
+ * starting at a particular root, without recursing (thus limiting stack growth
+ * from this point). It visits each superclass (if present) and superinterface
+ * in a depth-first manner, with callbacks to the ALGO class as each class is
+ * encountered (visit()). The algorithm can cut off further exploration of a
+ * particular branch by returning 'false' from a visit() call.
+ *
+ * The ALGO class must provide a visit() method, which will be
+ * called once for each node in the inheritance tree during the iteration. In
+ * addition, it can provide a memory block via new_node_data(InstanceKlass*),
+ * which it can use for node-specific storage (and access via the
+ * current_data() and data_at_depth(int) methods).
+ *
+ * Bare minimum needed to be an ALGO class:
+ * class Algo : public HierarchyVisitor<Algo> {
+ * void* new_node_data(InstanceKlass* cls) { return NULL; }
+ * void free_node_data(void* data) { return; }
+ * bool visit() { return true; }
+ * };
+ */
+template <class ALGO>
+class HierarchyVisitor : StackObj {
+ private:
+
+ class Node : public ResourceObj {
+ public:
+ InstanceKlass* _class;
+ bool _super_was_visited;
+ int _interface_index;
+ void* _algorithm_data;
+
+ Node(InstanceKlass* cls, void* data, bool visit_super)
+ : _class(cls), _super_was_visited(!visit_super),
+ _interface_index(0), _algorithm_data(data) {}
+
+ int number_of_interfaces() { return _class->local_interfaces()->length(); }
+ int interface_index() { return _interface_index; }
+ void set_super_visited() { _super_was_visited = true; }
+ void increment_visited_interface() { ++_interface_index; }
+ void set_all_interfaces_visited() {
+ _interface_index = number_of_interfaces();
+ }
+ bool has_visited_super() { return _super_was_visited; }
+ bool has_visited_all_interfaces() {
+ return interface_index() >= number_of_interfaces();
+ }
+ InstanceKlass* interface_at(int index) {
+ return InstanceKlass::cast(_class->local_interfaces()->at(index));
+ }
+ InstanceKlass* next_super() { return _class->java_super(); }
+ InstanceKlass* next_interface() {
+ return interface_at(interface_index());
+ }
+ };
+
+ bool _cancelled;
+ GrowableArray<Node*> _path;
+
+ Node* current_top() const { return _path.top(); }
+ bool has_more_nodes() const { return !_path.is_empty(); }
+ void push(InstanceKlass* cls, void* data) {
+ assert(cls != NULL, "Requires a valid instance class");
+ Node* node = new Node(cls, data, has_super(cls));
+ _path.push(node);
+ }
+ void pop() { _path.pop(); }
+
+ void reset_iteration() {
+ _cancelled = false;
+ _path.clear();
+ }
+ bool is_cancelled() const { return _cancelled; }
+
+ static bool has_super(InstanceKlass* cls) {
+ return cls->super() != NULL && !cls->is_interface();
+ }
+
+ Node* node_at_depth(int i) const {
+ return (i >= _path.length()) ? NULL : _path.at(_path.length() - i - 1);
+ }
+
+ protected:
+
+ // Accessors available to the algorithm
+ int current_depth() const { return _path.length() - 1; }
+
+ InstanceKlass* class_at_depth(int i) {
+ Node* n = node_at_depth(i);
+ return n == NULL ? NULL : n->_class;
+ }
+ InstanceKlass* current_class() { return class_at_depth(0); }
+
+ void* data_at_depth(int i) {
+ Node* n = node_at_depth(i);
+ return n == NULL ? NULL : n->_algorithm_data;
+ }
+ void* current_data() { return data_at_depth(0); }
+
+ void cancel_iteration() { _cancelled = true; }
+
+ public:
+
+ void run(InstanceKlass* root) {
+ ALGO* algo = static_cast<ALGO*>(this);
+
+ reset_iteration();
+
+ void* algo_data = algo->new_node_data(root);
+ push(root, algo_data);
+ bool top_needs_visit = true;
+
+ do {
+ Node* top = current_top();
+ if (top_needs_visit) {
+ if (algo->visit() == false) {
+ // algorithm does not want to continue along this path. Arrange
+ // it so that this state is immediately popped off the stack
+ top->set_super_visited();
+ top->set_all_interfaces_visited();
+ }
+ top_needs_visit = false;
+ }
+
+ if (top->has_visited_super() && top->has_visited_all_interfaces()) {
+ algo->free_node_data(top->_algorithm_data);
+ pop();
+ } else {
+ InstanceKlass* next = NULL;
+ if (top->has_visited_super() == false) {
+ next = top->next_super();
+ top->set_super_visited();
+ } else {
+ next = top->next_interface();
+ top->increment_visited_interface();
+ }
+ assert(next != NULL, "Otherwise we shouldn't be here");
+ algo_data = algo->new_node_data(next);
+ push(next, algo_data);
+ top_needs_visit = true;
+ }
+ } while (!is_cancelled() && has_more_nodes());
+ }
+};
+
+#ifndef PRODUCT
+class PrintHierarchy : public HierarchyVisitor<PrintHierarchy> {
+ public:
+
+ bool visit() {
+ InstanceKlass* cls = current_class();
+ streamIndentor si(tty, current_depth() * 2);
+ tty->indent().print_cr("%s", cls->name()->as_C_string());
+ return true;
+ }
+
+ void* new_node_data(InstanceKlass* cls) { return NULL; }
+ void free_node_data(void* data) { return; }
+};
+#endif // ndef PRODUCT
+
+// Used to register InstanceKlass objects and all related metadata structures
+// (Methods, ConstantPools) as "in-use" by the current thread so that they can't
+// be deallocated by class redefinition while we're using them. The classes are
+// de-registered when this goes out of scope.
+//
+// Once a class is registered, we need not bother with methodHandles or
+// constantPoolHandles for its associated metadata.
+class KeepAliveRegistrar : public StackObj {
+ private:
+ Thread* _thread;
+ GrowableArray<ConstantPool*> _keep_alive;
+
+ public:
+ KeepAliveRegistrar(Thread* thread) : _thread(thread), _keep_alive(20) {
+ assert(thread == Thread::current(), "Must be current thread");
+ }
+
+ ~KeepAliveRegistrar() {
+ for (int i = _keep_alive.length() - 1; i >= 0; --i) {
+ ConstantPool* cp = _keep_alive.at(i);
+ int idx = _thread->metadata_handles()->find_from_end(cp);
+ assert(idx > 0, "Must be in the list");
+ _thread->metadata_handles()->remove_at(idx);
+ }
+ }
+
+ // Register a class as 'in-use' by the thread. It's fine to register a class
+ // multiple times (though perhaps inefficient)
+ void register_class(InstanceKlass* ik) {
+ ConstantPool* cp = ik->constants();
+ _keep_alive.push(cp);
+ _thread->metadata_handles()->push(cp);
+ }
+};
+
+class KeepAliveVisitor : public HierarchyVisitor<KeepAliveVisitor> {
+ private:
+ KeepAliveRegistrar* _registrar;
+
+ public:
+ KeepAliveVisitor(KeepAliveRegistrar* registrar) : _registrar(registrar) {}
+
+ void* new_node_data(InstanceKlass* cls) { return NULL; }
+ void free_node_data(void* data) { return; }
+
+ bool visit() {
+ _registrar->register_class(current_class());
+ return true;
+ }
+};
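+
+// Typical use of the two classes above, mirroring generate_default_methods()
+// below (a sketch; 'thread' and 'ik' are assumed to be the current Thread*
+// and the root InstanceKlass*):
+//
+//   KeepAliveRegistrar keepAlive(thread);
+//   KeepAliveVisitor loadKeepAlive(&keepAlive);
+//   loadKeepAlive.run(ik);  // registers ik and every supertype as in-use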
+
+// A method family contains a set of all methods that implement a single
+// language-level method. Because of erasure, these methods may have different
+// signatures. As members of the set are collected while walking over the
+// hierarchy, they are tagged with a qualification state. The qualification
+// state for an erased method is set to disqualified if there exists a path
+// from the root of the hierarchy to the method that contains an interleaving
+// language-equivalent method defined in an interface.
+class MethodFamily : public ResourceObj {
+ private:
+
+ generic::MethodDescriptor* _descriptor; // language-level description
+ GrowableArray<Pair<Method*,QualifiedState> > _members;
+ ResourceHashtable<Method*, int> _member_index;
+
+ Method* _selected_target; // Filled in later, if a unique target exists
+ Symbol* _exception_message; // If no unique target is found
+
+ bool contains_method(Method* method) {
+ int* lookup = _member_index.get(method);
+ return lookup != NULL;
+ }
+
+ void add_method(Method* method, QualifiedState state) {
+ Pair<Method*,QualifiedState> entry(method, state);
+ _member_index.put(method, _members.length());
+ _members.append(entry);
+ }
+
+ void disqualify_method(Method* method) {
+ int* index = _member_index.get(method);
+ assert(index != NULL && *index >= 0 && *index < _members.length(), "bad index");
+ _members.at(*index).second = DISQUALIFIED;
+ }
+
+ Symbol* generate_no_defaults_message(TRAPS) const;
+ Symbol* generate_abstract_method_message(Method* method, TRAPS) const;
+ Symbol* generate_conflicts_message(GrowableArray<Method*>* methods, TRAPS) const;
+
+ public:
+
+ MethodFamily(generic::MethodDescriptor* canonical_desc)
+ : _descriptor(canonical_desc), _selected_target(NULL),
+ _exception_message(NULL) {}
+
+ generic::MethodDescriptor* descriptor() const { return _descriptor; }
+
+ bool descriptor_matches(generic::MethodDescriptor* md, generic::Context* ctx) {
+ return descriptor()->covariant_match(md, ctx);
+ }
+
+ void set_target_if_empty(Method* m) {
+ if (_selected_target == NULL && !m->is_overpass()) {
+ _selected_target = m;
+ }
+ }
+
+ void record_qualified_method(Method* m) {
+ // If the method already exists in the set as qualified, this operation is
+ // redundant. If it already exists as disqualified, then we leave it as
+    // disqualified. Thus we only add to the set if it's not already in the
+ // set.
+ if (!contains_method(m)) {
+ add_method(m, QUALIFIED);
+ }
+ }
+
+ void record_disqualified_method(Method* m) {
+ // If not in the set, add it as disqualified. If it's already in the set,
+ // then set the state to disqualified no matter what the previous state was.
+ if (!contains_method(m)) {
+ add_method(m, DISQUALIFIED);
+ } else {
+ disqualify_method(m);
+ }
+ }
+
+ bool has_target() const { return _selected_target != NULL; }
+ bool throws_exception() { return _exception_message != NULL; }
+
+ Method* get_selected_target() { return _selected_target; }
+ Symbol* get_exception_message() { return _exception_message; }
+
+ // Either sets the target or the exception error message
+ void determine_target(InstanceKlass* root, TRAPS) {
+ if (has_target() || throws_exception()) {
+ return;
+ }
+
+ GrowableArray<Method*> qualified_methods;
+ for (int i = 0; i < _members.length(); ++i) {
+ Pair<Method*,QualifiedState> entry = _members.at(i);
+ if (entry.second == QUALIFIED) {
+ qualified_methods.append(entry.first);
+ }
+ }
+
+ if (qualified_methods.length() == 0) {
+ _exception_message = generate_no_defaults_message(CHECK);
+ } else if (qualified_methods.length() == 1) {
+ Method* method = qualified_methods.at(0);
+ if (method->is_abstract()) {
+ _exception_message = generate_abstract_method_message(method, CHECK);
+ } else {
+ _selected_target = qualified_methods.at(0);
+ }
+ } else {
+ _exception_message = generate_conflicts_message(&qualified_methods,CHECK);
+ }
+
+ assert((has_target() ^ throws_exception()) == 1,
+ "One and only one must be true");
+ }
+
+ bool contains_signature(Symbol* query) {
+ for (int i = 0; i < _members.length(); ++i) {
+ if (query == _members.at(i).first->signature()) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+#ifndef PRODUCT
+ void print_on(outputStream* str) const {
+ print_on(str, 0);
+ }
+
+ void print_on(outputStream* str, int indent) const {
+ streamIndentor si(str, indent * 2);
+
+ generic::Context ctx(NULL); // empty, as _descriptor already canonicalized
+ TempNewSymbol family = descriptor()->reify_signature(&ctx, Thread::current());
+ str->indent().print_cr("Logical Method %s:", family->as_C_string());
+
+ streamIndentor si2(str);
+ for (int i = 0; i < _members.length(); ++i) {
+ str->indent();
+ print_method(str, _members.at(i).first);
+ if (_members.at(i).second == DISQUALIFIED) {
+ str->print(" (disqualified)");
+ }
+ str->print_cr("");
+ }
+
+ if (_selected_target != NULL) {
+ print_selected(str, 1);
+ }
+ }
+
+ void print_selected(outputStream* str, int indent) const {
+ assert(has_target(), "Should be called otherwise");
+ streamIndentor si(str, indent * 2);
+ str->indent().print("Selected method: ");
+ print_method(str, _selected_target);
+ str->print_cr("");
+ }
+
+ void print_exception(outputStream* str, int indent) {
+ assert(throws_exception(), "Should be called otherwise");
+ streamIndentor si(str, indent * 2);
+ str->indent().print_cr("%s", _exception_message->as_C_string());
+ }
+#endif // ndef PRODUCT
+};
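+
+// Sketch of how a MethodFamily is populated and resolved by the hierarchy walk
+// (illustrative only; the real driver is FindMethodsByName below):
+//
+//   family->record_qualified_method(m1);     // candidate seen on a qualified path
+//   family->record_disqualified_method(m2);  // candidate shadowed along its path
+//   family->determine_target(klass, CHECK);  // selects a unique target or an error
+//   if (family->has_target()) { /* use get_selected_target() */ }
+//   else                      { /* throw using get_exception_message() */ }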
+
+Symbol* MethodFamily::generate_no_defaults_message(TRAPS) const {
+ return SymbolTable::new_symbol("No qualifying defaults found", CHECK_NULL);
+}
+
+Symbol* MethodFamily::generate_abstract_method_message(Method* method, TRAPS) const {
+ Symbol* klass = method->klass_name();
+ Symbol* name = method->name();
+ Symbol* sig = method->signature();
+ stringStream ss;
+ ss.print("Method ");
+ ss.write((const char*)klass->bytes(), klass->utf8_length());
+ ss.print(".");
+ ss.write((const char*)name->bytes(), name->utf8_length());
+ ss.write((const char*)sig->bytes(), sig->utf8_length());
+ ss.print(" is abstract");
+ return SymbolTable::new_symbol(ss.base(), (int)ss.size(), CHECK_NULL);
+}
+
+Symbol* MethodFamily::generate_conflicts_message(GrowableArray<Method*>* methods, TRAPS) const {
+ stringStream ss;
+ ss.print("Conflicting default methods:");
+ for (int i = 0; i < methods->length(); ++i) {
+ Method* method = methods->at(i);
+ Symbol* klass = method->klass_name();
+ Symbol* name = method->name();
+ ss.print(" ");
+ ss.write((const char*)klass->bytes(), klass->utf8_length());
+ ss.print(".");
+ ss.write((const char*)name->bytes(), name->utf8_length());
+ }
+ return SymbolTable::new_symbol(ss.base(), (int)ss.size(), CHECK_NULL);
+}
+
+class StateRestorer;
+
+// StatefulMethodFamily is a wrapper around MethodFamily that maintains the
+// qualification state during hierarchy visitation, and applies that state
+// when adding members to the MethodFamily.
+class StatefulMethodFamily : public ResourceObj {
+ friend class StateRestorer;
+ private:
+ MethodFamily* _method;
+ QualifiedState _qualification_state;
+
+ void set_qualification_state(QualifiedState state) {
+ _qualification_state = state;
+ }
+
+ public:
+ StatefulMethodFamily(generic::MethodDescriptor* md, generic::Context* ctx) {
+ _method = new MethodFamily(md->canonicalize(ctx));
+ _qualification_state = QUALIFIED;
+ }
+
+ void set_target_if_empty(Method* m) { _method->set_target_if_empty(m); }
+
+ MethodFamily* get_method_family() { return _method; }
+
+ bool descriptor_matches(generic::MethodDescriptor* md, generic::Context* ctx) {
+ return _method->descriptor_matches(md, ctx);
+ }
+
+ StateRestorer* record_method_and_dq_further(Method* mo);
+};
+
+class StateRestorer : public PseudoScopeMark {
+ private:
+ StatefulMethodFamily* _method;
+ QualifiedState _state_to_restore;
+ public:
+ StateRestorer(StatefulMethodFamily* dm, QualifiedState state)
+ : _method(dm), _state_to_restore(state) {}
+ ~StateRestorer() { destroy(); }
+ void restore_state() { _method->set_qualification_state(_state_to_restore); }
+ virtual void destroy() { restore_state(); }
+};
+
+StateRestorer* StatefulMethodFamily::record_method_and_dq_further(Method* mo) {
+ StateRestorer* mark = new StateRestorer(this, _qualification_state);
+ if (_qualification_state == QUALIFIED) {
+ _method->record_qualified_method(mo);
+ } else {
+ _method->record_disqualified_method(mo);
+ }
+  // Everything found "above" this method in the hierarchy walk is set to
+ // disqualified
+ set_qualification_state(DISQUALIFIED);
+ return mark;
+}
+
+class StatefulMethodFamilies : public ResourceObj {
+ private:
+ GrowableArray<StatefulMethodFamily*> _methods;
+
+ public:
+ StatefulMethodFamily* find_matching(
+ generic::MethodDescriptor* md, generic::Context* ctx) {
+ for (int i = 0; i < _methods.length(); ++i) {
+ StatefulMethodFamily* existing = _methods.at(i);
+ if (existing->descriptor_matches(md, ctx)) {
+ return existing;
+ }
+ }
+ return NULL;
+ }
+
+ StatefulMethodFamily* find_matching_or_create(
+ generic::MethodDescriptor* md, generic::Context* ctx) {
+ StatefulMethodFamily* method = find_matching(md, ctx);
+ if (method == NULL) {
+ method = new StatefulMethodFamily(md, ctx);
+ _methods.append(method);
+ }
+ return method;
+ }
+
+ void extract_families_into(GrowableArray<MethodFamily*>* array) {
+ for (int i = 0; i < _methods.length(); ++i) {
+ array->append(_methods.at(i)->get_method_family());
+ }
+ }
+};
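+
+// During a hierarchy walk the families are looked up (or created) per generic
+// descriptor; a sketch of the pattern used in FindMethodsByName::visit() below:
+//
+//   StatefulMethodFamily* family = _families.find_matching_or_create(md, ctx);
+//   StateRestorer* restorer = family->record_method_and_dq_further(m);
+//   scope->add_mark(restorer);  // restores the qualification state on scope exit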
+
+// Represents a location corresponding to a vtable slot for which neither the
+// class nor any of its ancestors provides an implementation.
+// Default methods may be present to fill this slot.
+class EmptyVtableSlot : public ResourceObj {
+ private:
+ Symbol* _name;
+ Symbol* _signature;
+ int _size_of_parameters;
+ MethodFamily* _binding;
+
+ public:
+ EmptyVtableSlot(Method* method)
+ : _name(method->name()), _signature(method->signature()),
+ _size_of_parameters(method->size_of_parameters()), _binding(NULL) {}
+
+ Symbol* name() const { return _name; }
+ Symbol* signature() const { return _signature; }
+ int size_of_parameters() const { return _size_of_parameters; }
+
+ void bind_family(MethodFamily* lm) { _binding = lm; }
+ bool is_bound() { return _binding != NULL; }
+ MethodFamily* get_binding() { return _binding; }
+
+#ifndef PRODUCT
+ void print_on(outputStream* str) const {
+ print_slot(str, name(), signature());
+ }
+#endif // ndef PRODUCT
+};
+
+static GrowableArray<EmptyVtableSlot*>* find_empty_vtable_slots(
+ InstanceKlass* klass, GrowableArray<Method*>* mirandas, TRAPS) {
+
+ assert(klass != NULL, "Must be valid class");
+
+ GrowableArray<EmptyVtableSlot*>* slots = new GrowableArray<EmptyVtableSlot*>();
+
+ // All miranda methods are obvious candidates
+ for (int i = 0; i < mirandas->length(); ++i) {
+ EmptyVtableSlot* slot = new EmptyVtableSlot(mirandas->at(i));
+ slots->append(slot);
+ }
+
+  // Also any overpasses in our superclasses that we haven't implemented.
+ // (can't use the vtable because it is not guaranteed to be initialized yet)
+ InstanceKlass* super = klass->java_super();
+ while (super != NULL) {
+ for (int i = 0; i < super->methods()->length(); ++i) {
+ Method* m = super->methods()->at(i);
+ if (m->is_overpass()) {
+ // m is a method that would have been a miranda if not for the
+ // default method processing that occurred on behalf of our superclass,
+ // so it's a method we want to re-examine in this new context. That is,
+ // unless we have a real implementation of it in the current class.
+ Method* impl = klass->lookup_method(m->name(), m->signature());
+ if (impl == NULL || impl->is_overpass()) {
+ slots->append(new EmptyVtableSlot(m));
+ }
+ }
+ }
+ super = super->java_super();
+ }
+
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ tty->print_cr("Slots that need filling:");
+ streamIndentor si(tty);
+ for (int i = 0; i < slots->length(); ++i) {
+ tty->indent();
+ slots->at(i)->print_on(tty);
+ tty->print_cr("");
+ }
+ }
+#endif // ndef PRODUCT
+ return slots;
+}
+
+// Iterates over the type hierarchy looking for all methods with a specific
+// method name. The result of this is a set of method families, each of
+// which is populated with a set of methods that implement the same
+// language-level signature.
+class FindMethodsByName : public HierarchyVisitor<FindMethodsByName> {
+ private:
+ // Context data
+ Thread* THREAD;
+ generic::DescriptorCache* _cache;
+ Symbol* _method_name;
+ generic::Context* _ctx;
+ StatefulMethodFamilies _families;
+
+ public:
+
+ FindMethodsByName(generic::DescriptorCache* cache, Symbol* name,
+ generic::Context* ctx, Thread* thread) :
+ _cache(cache), _method_name(name), _ctx(ctx), THREAD(thread) {}
+
+ void get_discovered_families(GrowableArray<MethodFamily*>* methods) {
+ _families.extract_families_into(methods);
+ }
+
+ void* new_node_data(InstanceKlass* cls) { return new PseudoScope(); }
+ void free_node_data(void* node_data) {
+ PseudoScope::cast(node_data)->destroy();
+ }
+
+ bool visit() {
+ PseudoScope* scope = PseudoScope::cast(current_data());
+ InstanceKlass* klass = current_class();
+ InstanceKlass* sub = current_depth() > 0 ? class_at_depth(1) : NULL;
+
+ ContextMark* cm = new ContextMark(_ctx->mark());
+ scope->add_mark(cm); // will restore context when scope is freed
+
+ _ctx->apply_type_arguments(sub, klass, THREAD);
+
+ int start, end = 0;
+ start = klass->find_method_by_name(_method_name, &end);
+ if (start != -1) {
+ for (int i = start; i < end; ++i) {
+ Method* m = klass->methods()->at(i);
+ // This gets the method's parameter list with its generic type
+ // parameters resolved
+ generic::MethodDescriptor* md = _cache->descriptor_for(m, THREAD);
+
+ // Find all methods on this hierarchy that match this method
+ // (name, signature). This class collects other families of this
+ // method name.
+ StatefulMethodFamily* family =
+ _families.find_matching_or_create(md, _ctx);
+
+ if (klass->is_interface()) {
+          // Record this method under the current qualification state and
+          // disqualify matching methods found further along this path (the
+          // StateRestorer undoes this when the node's scope is destroyed).
+ StateRestorer* restorer = family->record_method_and_dq_further(m);
+ scope->add_mark(restorer);
+ } else {
+          // This is the rule that methods in classes take precedence over
+          // methods in interfaces. This works because of single inheritance.
+ family->set_target_if_empty(m);
+ }
+ }
+ }
+ return true;
+ }
+};
+
+#ifndef PRODUCT
+static void print_families(
+ GrowableArray<MethodFamily*>* methods, Symbol* match) {
+ streamIndentor si(tty, 4);
+ if (methods->length() == 0) {
+ tty->indent();
+ tty->print_cr("No Logical Method found");
+ }
+ for (int i = 0; i < methods->length(); ++i) {
+ tty->indent();
+ MethodFamily* lm = methods->at(i);
+ if (lm->contains_signature(match)) {
+ tty->print_cr("<Matching>");
+ } else {
+ tty->print_cr("<Non-Matching>");
+ }
+ lm->print_on(tty, 1);
+ }
+}
+#endif // ndef PRODUCT
+
+static void merge_in_new_methods(InstanceKlass* klass,
+ GrowableArray<Method*>* new_methods, TRAPS);
+static void create_overpasses(
+ GrowableArray<EmptyVtableSlot*>* slots, InstanceKlass* klass, TRAPS);
+
+// This is the guts of the default methods implementation. It is called just
+// after the classfile has been parsed if some ancestor has default methods.
+//
+// First it finds any name/signature slots that need an implementation (either
+// because they are miranda or a superclass's implementation is an overpass
+// itself). For each slot, iterate over the hierarchy, using generic signature
+// information to partition any methods that match the name into method families
+// where each family contains methods whose signatures are equivalent at the
+// language level (i.e., their reified parameters match and return values are
+// covariant). Check those sets to see if they contain a signature that matches
+// the slot we're looking at (if we're lucky, there might be other empty slots
+// that we can fill using the same analysis).
+//
+// For each slot filled, we generate an overpass method that either calls the
+// unique default method candidate using invokespecial, or throws an exception
+// (in the case of no default method candidates, or more than one valid
+// candidate). These methods are then added to the class's method list. If
+// the method set we're using contains methods (qualified or not) with a
+// different runtime signature than the method we're creating, then we have to
+// create bridges with those signatures too.
+void DefaultMethods::generate_default_methods(
+ InstanceKlass* klass, GrowableArray<Method*>* mirandas, TRAPS) {
+
+ // This resource mark is the bound for all memory allocation that takes
+ // place during default method processing. After this goes out of scope,
+ // all (Resource) objects' memory will be reclaimed. Be careful if adding an
+ // embedded resource mark under here as that memory can't be used outside
+ // whatever scope it's in.
+ ResourceMark rm(THREAD);
+
+ generic::DescriptorCache cache;
+
+ // Keep entire hierarchy alive for the duration of the computation
+ KeepAliveRegistrar keepAlive(THREAD);
+ KeepAliveVisitor loadKeepAlive(&keepAlive);
+ loadKeepAlive.run(klass);
+
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ ResourceMark rm; // be careful with these!
+ tty->print_cr("Class %s requires default method processing",
+ klass->name()->as_klass_external_name());
+ PrintHierarchy printer;
+ printer.run(klass);
+ }
+#endif // ndef PRODUCT
+
+ GrowableArray<EmptyVtableSlot*>* empty_slots =
+ find_empty_vtable_slots(klass, mirandas, CHECK);
+
+ for (int i = 0; i < empty_slots->length(); ++i) {
+ EmptyVtableSlot* slot = empty_slots->at(i);
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ streamIndentor si(tty, 2);
+ tty->indent().print("Looking for default methods for slot ");
+ slot->print_on(tty);
+ tty->print_cr("");
+ }
+#endif // ndef PRODUCT
+ if (slot->is_bound()) {
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ streamIndentor si(tty, 4);
+ tty->indent().print_cr("Already bound to logical method:");
+ slot->get_binding()->print_on(tty, 1);
+ }
+#endif // ndef PRODUCT
+ continue; // covered by previous processing
+ }
+
+ generic::Context ctx(&cache);
+ FindMethodsByName visitor(&cache, slot->name(), &ctx, CHECK);
+ visitor.run(klass);
+
+ GrowableArray<MethodFamily*> discovered_families;
+ visitor.get_discovered_families(&discovered_families);
+
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ print_families(&discovered_families, slot->signature());
+ }
+#endif // ndef PRODUCT
+
+ // Find and populate any other slots that match the discovered families
+ for (int j = i; j < empty_slots->length(); ++j) {
+ EmptyVtableSlot* open_slot = empty_slots->at(j);
+
+ if (slot->name() == open_slot->name()) {
+ for (int k = 0; k < discovered_families.length(); ++k) {
+ MethodFamily* lm = discovered_families.at(k);
+
+ if (lm->contains_signature(open_slot->signature())) {
+ lm->determine_target(klass, CHECK);
+ open_slot->bind_family(lm);
+ }
+ }
+ }
+ }
+ }
+
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ tty->print_cr("Creating overpasses...");
+ }
+#endif // ndef PRODUCT
+
+ create_overpasses(empty_slots, klass, CHECK);
+
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ tty->print_cr("Default method processing complete");
+ }
+#endif // ndef PRODUCT
+}
+
+
+/**
+ * Generic analysis was used upon interface '_target' and found a unique
+ * default method candidate with generic signature '_method_desc'. This
+ * method is only viable if it would also be in the set of default method
+ * candidates if we ran a full analysis on the current class.
+ *
+ * The only reason that the method would not be in the set of candidates for
+ * the current class is if there's another covariantly matching method
+ * which is "more specific" than the found method -- i.e., one could find a
+ * path in the interface hierarchy in which the matching method appears
+ * before we get to '_target'.
+ *
+ * In order to determine this, we examine all of the implemented
+ * interfaces. If we find a path that leads to the '_target' interface, then
+ * we examine that path to see if there are any methods that would shadow
+ * the selected method along that path.
+ */
+class ShadowChecker : public HierarchyVisitor<ShadowChecker> {
+ private:
+ generic::DescriptorCache* _cache;
+ Thread* THREAD;
+
+ InstanceKlass* _target;
+
+ Symbol* _method_name;
+ InstanceKlass* _method_holder;
+ generic::MethodDescriptor* _method_desc;
+ bool _found_shadow;
+
+ bool path_has_shadow() {
+ generic::Context ctx(_cache);
+
+ for (int i = current_depth() - 1; i > 0; --i) {
+ InstanceKlass* ik = class_at_depth(i);
+ InstanceKlass* sub = class_at_depth(i + 1);
+ ctx.apply_type_arguments(sub, ik, THREAD);
+
+ if (ik->is_interface()) {
+ int end;
+ int start = ik->find_method_by_name(_method_name, &end);
+ if (start != -1) {
+ for (int j = start; j < end; ++j) {
+ Method* mo = ik->methods()->at(j);
+ generic::MethodDescriptor* md = _cache->descriptor_for(mo, THREAD);
+ if (_method_desc->covariant_match(md, &ctx)) {
+ return true;
+ }
+ }
+ }
+ }
+ }
+ return false;
+ }
+
+ public:
+
+ ShadowChecker(generic::DescriptorCache* cache, Thread* thread,
+ Symbol* name, InstanceKlass* holder, generic::MethodDescriptor* desc,
+ InstanceKlass* target)
+ : _cache(cache), THREAD(thread), _method_name(name), _method_holder(holder),
+ _method_desc(desc), _target(target), _found_shadow(false) {}
+
+ void* new_node_data(InstanceKlass* cls) { return NULL; }
+ void free_node_data(void* data) { return; }
+
+ bool visit() {
+ InstanceKlass* ik = current_class();
+ if (ik == _target && current_depth() == 1) {
+ return false; // This was the specified super -- no need to search it
+ }
+ if (ik == _method_holder || ik == _target) {
+ // We found a path that should be examined to see if it shadows _method
+ if (path_has_shadow()) {
+ _found_shadow = true;
+ cancel_iteration();
+ }
+ return false; // no need to continue up hierarchy
+ }
+ return true;
+ }
+
+ bool found_shadow() { return _found_shadow; }
+};
+
+// This is called during linktime when we find an invokespecial call that
+// refers to a direct superinterface. It indicates that we should find the
+// default method in the hierarchy of that superinterface, and if that method
+// would have been a candidate from the point of view of 'this' class, then we
+// return that method.
+Method* DefaultMethods::find_super_default(
+ Klass* cls, Klass* super, Symbol* method_name, Symbol* sig, TRAPS) {
+
+ ResourceMark rm(THREAD);
+
+ assert(cls != NULL && super != NULL, "Need real classes");
+
+ InstanceKlass* current_class = InstanceKlass::cast(cls);
+ InstanceKlass* direction = InstanceKlass::cast(super);
+
+ // Keep entire hierarchy alive for the duration of the computation
+ KeepAliveRegistrar keepAlive(THREAD);
+ KeepAliveVisitor loadKeepAlive(&keepAlive);
+ loadKeepAlive.run(current_class);
+
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ tty->print_cr("Finding super default method %s.%s%s from %s",
+ direction->name()->as_C_string(),
+ method_name->as_C_string(), sig->as_C_string(),
+ current_class->name()->as_C_string());
+ }
+#endif // ndef PRODUCT
+
+ if (!direction->is_interface()) {
+ // We should not be here
+ return NULL;
+ }
+
+ generic::DescriptorCache cache;
+ generic::Context ctx(&cache);
+
+ // Prime the initial generic context for current -> direction
+ ctx.apply_type_arguments(current_class, direction, CHECK_NULL);
+
+ FindMethodsByName visitor(&cache, method_name, &ctx, CHECK_NULL);
+ visitor.run(direction);
+
+ GrowableArray<MethodFamily*> families;
+ visitor.get_discovered_families(&families);
+
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ print_families(&families, sig);
+ }
+#endif // ndef PRODUCT
+
+ MethodFamily* selected_family = NULL;
+
+ for (int i = 0; i < families.length(); ++i) {
+ MethodFamily* lm = families.at(i);
+ if (lm->contains_signature(sig)) {
+ lm->determine_target(current_class, CHECK_NULL);
+ selected_family = lm;
+ }
+ }
+
+ if (selected_family->has_target()) {
+ Method* target = selected_family->get_selected_target();
+ InstanceKlass* holder = InstanceKlass::cast(target->method_holder());
+
+ // Verify that the identified method is valid from the context of
+ // the current class
+ ShadowChecker checker(&cache, THREAD, target->name(),
+ holder, selected_family->descriptor(), direction);
+ checker.run(current_class);
+
+ if (checker.found_shadow()) {
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ tty->print_cr(" Only candidate found was shadowed.");
+ }
+#endif // ndef PRODUCT
+ THROW_MSG_(vmSymbols::java_lang_AbstractMethodError(),
+ "Accessible default method not found", NULL);
+ } else {
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ tty->print(" Returning ");
+ print_method(tty, target, true);
+ tty->print_cr("");
+ }
+#endif // ndef PRODUCT
+ return target;
+ }
+ } else {
+ assert(selected_family->throws_exception(), "must have target or throw");
+ THROW_MSG_(vmSymbols::java_lang_AbstractMethodError(),
+ selected_family->get_exception_message()->as_C_string(), NULL);
+ }
+}
+
+
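+// Assembles the body of an overpass that forwards a call with the slot's
+// erased signature ('incoming') to the selected default method ('target'):
+// it loads 'this' and each argument, inserts a checkcast where the erased
+// parameter types differ, invokes the target (invokespecial for interface
+// holders, invokevirtual otherwise), re-casts the returned reference if the
+// erased return types differ, and returns. The value returned here is used
+// as the overpass method's max stack size.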
+static int assemble_redirect(
+ BytecodeConstantPool* cp, BytecodeBuffer* buffer,
+ Symbol* incoming, Method* target, TRAPS) {
+
+ BytecodeAssembler assem(buffer, cp);
+
+ SignatureStream in(incoming, true);
+ SignatureStream out(target->signature(), true);
+ u2 parameter_count = 0;
+
+ assem.aload(parameter_count++); // load 'this'
+
+ while (!in.at_return_type()) {
+ assert(!out.at_return_type(), "Parameter counts do not match");
+ BasicType bt = in.type();
+ assert(out.type() == bt, "Parameter types are not compatible");
+ assem.load(bt, parameter_count);
+ if (in.is_object() && in.as_symbol(THREAD) != out.as_symbol(THREAD)) {
+ assem.checkcast(out.as_symbol(THREAD));
+ } else if (bt == T_LONG || bt == T_DOUBLE) {
+ ++parameter_count; // longs and doubles use two slots
+ }
+ ++parameter_count;
+ in.next();
+ out.next();
+ }
+ assert(out.at_return_type(), "Parameter counts do not match");
+ assert(in.type() == out.type(), "Return types are not compatible");
+
+ if (parameter_count == 1 && (in.type() == T_LONG || in.type() == T_DOUBLE)) {
+ ++parameter_count; // need room for return value
+ }
+ if (target->method_holder()->is_interface()) {
+ assem.invokespecial(target);
+ } else {
+ assem.invokevirtual(target);
+ }
+
+ if (in.is_object() && in.as_symbol(THREAD) != out.as_symbol(THREAD)) {
+ assem.checkcast(in.as_symbol(THREAD));
+ }
+ assem._return(in.type());
+ return parameter_count;
+}
+
+static int assemble_abstract_method_error(
+ BytecodeConstantPool* cp, BytecodeBuffer* buffer, Symbol* message, TRAPS) {
+
+ Symbol* errorName = vmSymbols::java_lang_AbstractMethodError();
+ Symbol* init = vmSymbols::object_initializer_name();
+ Symbol* sig = vmSymbols::string_void_signature();
+
+ BytecodeAssembler assem(buffer, cp);
+
+ assem._new(errorName);
+ assem.dup();
+ assem.load_string(message);
+ assem.invokespecial(errorName, init, sig);
+ assem.athrow();
+
+ return 3; // max stack size: [ exception, exception, string ]
+}
+
+static Method* new_method(
+ BytecodeConstantPool* cp, BytecodeBuffer* bytecodes, Symbol* name,
+ Symbol* sig, AccessFlags flags, int max_stack, int params,
+ ConstMethod::MethodType mt, TRAPS) {
+
+ address code_start = static_cast<address>(bytecodes->adr_at(0));
+ int code_length = bytecodes->length();
+
+ Method* m = Method::allocate(cp->pool_holder()->class_loader_data(),
+ code_length, flags, 0, 0, 0, 0, mt, CHECK_NULL);
+
+ m->set_constants(NULL); // This will get filled in later
+ m->set_name_index(cp->utf8(name));
+ m->set_signature_index(cp->utf8(sig));
+ m->set_generic_signature_index(0);
+#ifdef CC_INTERP
+ ResultTypeFinder rtf(sig);
+ m->set_result_index(rtf.type());
+#endif
+ m->set_size_of_parameters(params);
+ m->set_max_stack(max_stack);
+ m->set_max_locals(params);
+ m->constMethod()->set_stackmap_data(NULL);
+ m->set_code(code_start);
+ m->set_force_inline(true);
+
+ return m;
+}
+
+static void switchover_constant_pool(BytecodeConstantPool* bpool,
+ InstanceKlass* klass, GrowableArray<Method*>* new_methods, TRAPS) {
+
+ if (new_methods->length() > 0) {
+ ConstantPool* cp = bpool->create_constant_pool(CHECK);
+ if (cp != klass->constants()) {
+ klass->class_loader_data()->add_to_deallocate_list(klass->constants());
+ klass->set_constants(cp);
+ cp->set_pool_holder(klass);
+
+ for (int i = 0; i < new_methods->length(); ++i) {
+ new_methods->at(i)->set_constants(cp);
+ }
+ for (int i = 0; i < klass->methods()->length(); ++i) {
+ Method* mo = klass->methods()->at(i);
+ mo->set_constants(cp);
+ }
+ }
+ }
+}
+
+// A "bridge" is a method created by javac to bridge the gap between
+// an implementation and a generically-compatible, but different, signature.
+// Bridges have an actual bytecode implementation in classfiles.
+// An "overpass", on the other hand, performs the same function as a bridge
+// but does not occur in a classfile; the VM creates the overpass itself when
+// it needs a path to get from a call site to a default method and no bridge
+// exists.
+static void create_overpasses(
+ GrowableArray<EmptyVtableSlot*>* slots,
+ InstanceKlass* klass, TRAPS) {
+
+ GrowableArray<Method*> overpasses;
+ BytecodeConstantPool bpool(klass->constants());
+
+ for (int i = 0; i < slots->length(); ++i) {
+ EmptyVtableSlot* slot = slots->at(i);
+
+ if (slot->is_bound()) {
+ MethodFamily* method = slot->get_binding();
+ int max_stack = 0;
+ BytecodeBuffer buffer;
+
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ tty->print("for slot: ");
+ slot->print_on(tty);
+ tty->print_cr("");
+ if (method->has_target()) {
+ method->print_selected(tty, 1);
+ } else {
+ method->print_exception(tty, 1);
+ }
+ }
+#endif // ndef PRODUCT
+ if (method->has_target()) {
+ Method* selected = method->get_selected_target();
+ max_stack = assemble_redirect(
+ &bpool, &buffer, slot->signature(), selected, CHECK);
+ } else if (method->throws_exception()) {
+ max_stack = assemble_abstract_method_error(
+ &bpool, &buffer, method->get_exception_message(), CHECK);
+ }
+ AccessFlags flags = accessFlags_from(
+ JVM_ACC_PUBLIC | JVM_ACC_SYNTHETIC | JVM_ACC_BRIDGE);
+ Method* m = new_method(&bpool, &buffer, slot->name(), slot->signature(),
+ flags, max_stack, slot->size_of_parameters(),
+ ConstMethod::OVERPASS, CHECK);
+ if (m != NULL) {
+ overpasses.push(m);
+ }
+ }
+ }
+
+#ifndef PRODUCT
+ if (TraceDefaultMethods) {
+ tty->print_cr("Created %d overpass methods", overpasses.length());
+ }
+#endif // ndef PRODUCT
+
+ switchover_constant_pool(&bpool, klass, &overpasses, CHECK);
+ merge_in_new_methods(klass, &overpasses, CHECK);
+}
+
+static void sort_methods(GrowableArray<Method*>* methods) {
+ // Note that this must sort using the same key as is used for sorting
+ // methods in InstanceKlass.
+ bool sorted = true;
+ for (int i = methods->length() - 1; i > 0; --i) {
+ for (int j = 0; j < i; ++j) {
+ Method* m1 = methods->at(j);
+ Method* m2 = methods->at(j + 1);
+ if ((uintptr_t)m1->name() > (uintptr_t)m2->name()) {
+ methods->at_put(j, m2);
+ methods->at_put(j + 1, m1);
+ sorted = false;
+ }
+ }
+ if (sorted) break;
+ sorted = true;
+ }
+#ifdef ASSERT
+ uintptr_t prev = 0;
+ for (int i = 0; i < methods->length(); ++i) {
+ Method* mh = methods->at(i);
+ uintptr_t nv = (uintptr_t)mh->name();
+ assert(nv >= prev, "Incorrect overpass method ordering");
+ prev = nv;
+ }
+#endif
+}
+
+static void merge_in_new_methods(InstanceKlass* klass,
+ GrowableArray<Method*>* new_methods, TRAPS) {
+
+ enum { ANNOTATIONS, PARAMETERS, DEFAULTS, NUM_ARRAYS };
+
+ Array<AnnotationArray*>* original_annots[NUM_ARRAYS];
+
+ Array<Method*>* original_methods = klass->methods();
+ Annotations* annots = klass->annotations();
+ original_annots[ANNOTATIONS] = annots->methods_annotations();
+ original_annots[PARAMETERS] = annots->methods_parameter_annotations();
+ original_annots[DEFAULTS] = annots->methods_default_annotations();
+
+ Array<int>* original_ordering = klass->method_ordering();
+ Array<int>* merged_ordering = Universe::the_empty_int_array();
+
+ int new_size = klass->methods()->length() + new_methods->length();
+
+ Array<AnnotationArray*>* merged_annots[NUM_ARRAYS];
+
+ Array<Method*>* merged_methods = MetadataFactory::new_array<Method*>(
+ klass->class_loader_data(), new_size, NULL, CHECK);
+ for (int i = 0; i < NUM_ARRAYS; ++i) {
+ if (original_annots[i] != NULL) {
+ merged_annots[i] = MetadataFactory::new_array<AnnotationArray*>(
+ klass->class_loader_data(), new_size, CHECK);
+ } else {
+ merged_annots[i] = NULL;
+ }
+ }
+ if (original_ordering != NULL && original_ordering->length() > 0) {
+ merged_ordering = MetadataFactory::new_array<int>(
+ klass->class_loader_data(), new_size, CHECK);
+ }
+ int method_order_index = klass->methods()->length();
+
+ sort_methods(new_methods);
+
+ // Perform grand merge of existing methods and new methods
+ int orig_idx = 0;
+ int new_idx = 0;
+
+ for (int i = 0; i < new_size; ++i) {
+ Method* orig_method = NULL;
+ Method* new_method = NULL;
+ if (orig_idx < original_methods->length()) {
+ orig_method = original_methods->at(orig_idx);
+ }
+ if (new_idx < new_methods->length()) {
+ new_method = new_methods->at(new_idx);
+ }
+
+ if (orig_method != NULL &&
+ (new_method == NULL || orig_method->name() < new_method->name())) {
+ merged_methods->at_put(i, orig_method);
+ original_methods->at_put(orig_idx, NULL);
+ for (int j = 0; j < NUM_ARRAYS; ++j) {
+ if (merged_annots[j] != NULL) {
+ merged_annots[j]->at_put(i, original_annots[j]->at(orig_idx));
+ original_annots[j]->at_put(orig_idx, NULL);
+ }
+ }
+ if (merged_ordering->length() > 0) {
+ merged_ordering->at_put(i, original_ordering->at(orig_idx));
+ }
+ ++orig_idx;
+ } else {
+ merged_methods->at_put(i, new_method);
+ if (merged_ordering->length() > 0) {
+ merged_ordering->at_put(i, method_order_index++);
+ }
+ ++new_idx;
+ }
+ // update idnum for new location
+ merged_methods->at(i)->set_method_idnum(i);
+ }
+
+ // Verify correct order
+#ifdef ASSERT
+ uintptr_t prev = 0;
+ for (int i = 0; i < merged_methods->length(); ++i) {
+ Method* mo = merged_methods->at(i);
+ uintptr_t nv = (uintptr_t)mo->name();
+ assert(nv >= prev, "Incorrect method ordering");
+ prev = nv;
+ }
+#endif
+
+ // Replace klass methods with new merged lists
+ klass->set_methods(merged_methods);
+ annots->set_methods_annotations(merged_annots[ANNOTATIONS]);
+ annots->set_methods_parameter_annotations(merged_annots[PARAMETERS]);
+ annots->set_methods_default_annotations(merged_annots[DEFAULTS]);
+
+ ClassLoaderData* cld = klass->class_loader_data();
+ MetadataFactory::free_array(cld, original_methods);
+ for (int i = 0; i < NUM_ARRAYS; ++i) {
+ MetadataFactory::free_array(cld, original_annots[i]);
+ }
+ if (original_ordering->length() > 0) {
+ klass->set_method_ordering(merged_ordering);
+ MetadataFactory::free_array(cld, original_ordering);
+ }
+}
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/classfile/defaultMethods.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_VM_CLASSFILE_DEFAULTMETHODS_HPP
+#define SHARE_VM_CLASSFILE_DEFAULTMETHODS_HPP
+
+#include "runtime/handles.hpp"
+#include "utilities/growableArray.hpp"
+#include "utilities/exceptions.hpp"
+
+class InstanceKlass;
+class Symbol;
+class Method;
+
+class DefaultMethods : AllStatic {
+ public:
+
+  // Analyzes the class and determines which default methods are inherited
+  // from interfaces (and have no other implementation). For each method
+ // (and each different signature the method could have), create an
+ // "overpass" method that is an instance method that redirects to the
+  // default method. Overpass methods are added to the class's method list.
+ static void generate_default_methods(
+ InstanceKlass* klass, GrowableArray<Method*>* mirandas, TRAPS);
+
+
+  // Called during linking when an invokespecial to a direct interface
+ // method is found. Selects and returns a method if there is a unique
+ // default method in the 'super_iface' part of the hierarchy which is
+ // also a candidate default for 'this_klass'. Otherwise throws an AME.
+ static Method* find_super_default(
+ Klass* this_klass, Klass* super_iface,
+ Symbol* method_name, Symbol* method_sig, TRAPS);
+};
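+
+// Illustrative call sites (a sketch; the actual callers are the class file
+// parser, right after parsing, and the link resolver during invokespecial
+// resolution, as described above):
+//
+//   DefaultMethods::generate_default_methods(this_klass, &mirandas, CHECK);
+//
+//   Method* target = DefaultMethods::find_super_default(
+//       this_klass, super_iface, name, signature, CHECK_NULL);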
+
+#endif // SHARE_VM_CLASSFILE_DEFAULTMETHODS_HPP
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/classfile/genericSignatures.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -0,0 +1,1272 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#include "precompiled.hpp"
+
+#include "classfile/genericSignatures.hpp"
+#include "classfile/symbolTable.hpp"
+#include "classfile/systemDictionary.hpp"
+#include "memory/resourceArea.hpp"
+
+namespace generic {
+
+// Helper class for parsing the generic signature Symbols of klasses and methods
+class DescriptorStream : public ResourceObj {
+ private:
+ Symbol* _symbol;
+ int _offset;
+ int _mark;
+ const char* _parse_error;
+
+ void set_parse_error(const char* error) {
+ assert(error != NULL, "Can't set NULL error string");
+ _parse_error = error;
+ }
+
+ public:
+ DescriptorStream(Symbol* sym)
+ : _symbol(sym), _offset(0), _mark(-1), _parse_error(NULL) {}
+
+ const char* parse_error() const {
+ return _parse_error;
+ }
+
+ bool at_end() { return _offset >= _symbol->utf8_length(); }
+
+ char peek() {
+ if (at_end()) {
+ set_parse_error("Peeking past end of signature");
+ return '\0';
+ } else {
+ return _symbol->byte_at(_offset);
+ }
+ }
+
+ char read() {
+ if (at_end()) {
+ set_parse_error("Reading past end of signature");
+ return '\0';
+ } else {
+ return _symbol->byte_at(_offset++);
+ }
+ }
+
+ void read(char expected) {
+ char c = read();
+ assert_char(c, expected, 0);
+ }
+
+ void assert_char(char c, char expected, int pos = -1) {
+ if (c != expected) {
+ const char* fmt = "Parse error at %d: expected %c but got %c";
+ size_t len = strlen(fmt) + 5;
+ char* buffer = NEW_RESOURCE_ARRAY(char, len);
+ jio_snprintf(buffer, len, fmt, _offset + pos, expected, c);
+ set_parse_error(buffer);
+ }
+ }
+
+ void push(char c) {
+ assert(c == _symbol->byte_at(_offset - 1), "Pushing back wrong value");
+ --_offset;
+ }
+
+ void expect_end() {
+ if (!at_end()) {
+ set_parse_error("Unexpected data trailing signature");
+ }
+ }
+
+ bool has_mark() { return _mark != -1; }
+
+ void set_mark() {
+ _mark = _offset;
+ }
+
+ Identifier* identifier_from_mark() {
+ assert(has_mark(), "Mark should be set");
+ if (!has_mark()) {
+ set_parse_error("Expected mark to be set");
+ return NULL;
+ } else {
+ Identifier* id = new Identifier(_symbol, _mark, _offset - 1);
+ _mark = -1;
+ return id;
+ }
+ }
+};
+
+
+#define CHECK_FOR_PARSE_ERROR() \
+ if (STREAM->parse_error() != NULL) { \
+ if (VerifyGenericSignatures) { \
+ fatal(STREAM->parse_error()); \
+ } \
+ return NULL; \
+ } 0
+
+#define READ() STREAM->read(); CHECK_FOR_PARSE_ERROR()
+#define PEEK() STREAM->peek(); CHECK_FOR_PARSE_ERROR()
+#define PUSH(c) STREAM->push(c)
+#define EXPECT(c) STREAM->read(c); CHECK_FOR_PARSE_ERROR()
+#define EXPECTED(c, ch) STREAM->assert_char(c, ch); CHECK_FOR_PARSE_ERROR()
+#define EXPECT_END() STREAM->expect_end(); CHECK_FOR_PARSE_ERROR()
+
+#define CHECK_STREAM STREAM); CHECK_FOR_PARSE_ERROR(); (0
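+
+// These macros assume a local 'DescriptorStream* STREAM' is in scope. The
+// CHECK_STREAM macro is passed as a pseudo-argument so that, for example,
+//   Type* t = Type::parse_generic_signature(CHECK_STREAM);
+// expands into a call taking STREAM followed by a parse-error check, mirroring
+// the TRAPS/CHECK convention used elsewhere in the VM.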
+
+#ifndef PRODUCT
+void Identifier::print_on(outputStream* str) const {
+ for (int i = _begin; i < _end; ++i) {
+ str->print("%c", (char)_sym->byte_at(i));
+ }
+}
+#endif // ndef PRODUCT
+
+bool Identifier::equals(Identifier* other) {
+ if (_sym == other->_sym && _begin == other->_begin && _end == other->_end) {
+ return true;
+ } else if (_end - _begin != other->_end - other->_begin) {
+ return false;
+ } else {
+ size_t len = _end - _begin;
+ char* addr = ((char*)_sym->bytes()) + _begin;
+ char* oaddr = ((char*)other->_sym->bytes()) + other->_begin;
+ return strncmp(addr, oaddr, len) == 0;
+ }
+}
+
+bool Identifier::equals(Symbol* sym) {
+ Identifier id(sym, 0, sym->utf8_length());
+ return equals(&id);
+}
+
+/**
+ * A formal type parameter may be found in the enclosing class, but it could
+ * also come from an enclosing method or outer class, in the case of inner-outer
+ * classes or anonymous classes. For example:
+ *
+ * class Outer<T,V> {
+ * class Inner<W> {
+ * void m(T t, V v, W w);
+ * }
+ * }
+ *
+ * In this case, the type variables in m()'s signature are not all found in the
+ * immediate enclosing class (Inner). class Inner has only type parameter W,
+ * but its outer_class field will reference Outer's descriptor, which contains
+ * T & V (no outer_method in this case).
+ *
+ * If you have an anonymous class, it has both an enclosing method *and* an
+ * enclosing class where type parameters can be declared:
+ *
+ * class MOuter<T> {
+ * <V> void bar(V v) {
+ * Runnable r = new Runnable() {
+ * public void run() {}
+ * public void foo(T t, V v) { ... }
+ * };
+ * }
+ * }
+ *
+ * In this case, foo will be a member of some class, Runnable$1, which has no
+ * formal parameters itself, but has an outer_method (bar()) which provides
+ * type parameter V, and an outer class MOuter with type parameter T.
+ *
+ * It is also possible that the outer class is itself an inner class to some
+ * other class (or an anonymous class with an enclosing method), so we need to
+ * follow the outer_class/outer_method chain to its end when looking for a
+ * type parameter.
+ */
+TypeParameter* Descriptor::find_type_parameter(Identifier* id, int* depth) {
+
+ int current_depth = 0;
+
+ MethodDescriptor* outer_method = as_method_signature();
+ ClassDescriptor* outer_class = as_class_signature();
+
+ if (outer_class == NULL) { // 'this' is a method signature; use the holder
+ outer_class = outer_method->outer_class();
+ }
+
+ while (outer_method != NULL || outer_class != NULL) {
+ if (outer_method != NULL) {
+ for (int i = 0; i < outer_method->type_parameters().length(); ++i) {
+ TypeParameter* p = outer_method->type_parameters().at(i);
+ if (p->identifier()->equals(id)) {
+          *depth = -1; // indicates that this is a method parameter
+ return p;
+ }
+ }
+ }
+ if (outer_class != NULL) {
+ for (int i = 0; i < outer_class->type_parameters().length(); ++i) {
+ TypeParameter* p = outer_class->type_parameters().at(i);
+ if (p->identifier()->equals(id)) {
+ *depth = current_depth;
+ return p;
+ }
+ }
+ outer_method = outer_class->outer_method();
+ outer_class = outer_class->outer_class();
+ ++current_depth;
+ }
+ }
+
+ if (VerifyGenericSignatures) {
+ fatal("Could not resolve identifier");
+ }
+
+ return NULL;
+}
+
+ClassDescriptor* ClassDescriptor::parse_generic_signature(Klass* klass, TRAPS) {
+ return parse_generic_signature(klass, NULL, CHECK_NULL);
+}
+
+ClassDescriptor* ClassDescriptor::parse_generic_signature(
+ Klass* klass, Symbol* original_name, TRAPS) {
+
+ InstanceKlass* ik = InstanceKlass::cast(klass);
+ Symbol* sym = ik->generic_signature();
+
+ ClassDescriptor* spec;
+
+ if (sym == NULL || (spec = ClassDescriptor::parse_generic_signature(sym)) == NULL) {
+ spec = ClassDescriptor::placeholder(ik);
+ }
+
+ u2 outer_index = get_outer_class_index(ik, CHECK_NULL);
+ if (outer_index != 0) {
+ if (original_name == NULL) {
+ original_name = ik->name();
+ }
+ Handle class_loader = Handle(THREAD, ik->class_loader());
+ Handle protection_domain = Handle(THREAD, ik->protection_domain());
+
+ Symbol* outer_name = ik->constants()->klass_name_at(outer_index);
+ Klass* outer = SystemDictionary::find(
+ outer_name, class_loader, protection_domain, CHECK_NULL);
+ if (outer == NULL && !THREAD->is_Compiler_thread()) {
+ outer = SystemDictionary::resolve_super_or_fail(original_name,
+ outer_name, class_loader, protection_domain, false, CHECK_NULL);
+ }
+
+    InstanceKlass* outer_ik = NULL;  // remains NULL if the outer class cannot be loaded
+ ClassDescriptor* outer_spec = NULL;
+ if (outer == NULL) {
+ outer_spec = ClassDescriptor::placeholder(ik);
+ assert(false, "Outer class not loaded and not loadable from here");
+ } else {
+ outer_ik = InstanceKlass::cast(outer);
+ outer_spec = parse_generic_signature(outer, original_name, CHECK_NULL);
+ }
+ spec->set_outer_class(outer_spec);
+
+ u2 encl_method_idx = ik->enclosing_method_method_index();
+ if (encl_method_idx != 0 && outer_ik != NULL) {
+ ConstantPool* cp = ik->constants();
+ u2 name_index = cp->name_ref_index_at(encl_method_idx);
+ u2 sig_index = cp->signature_ref_index_at(encl_method_idx);
+ Symbol* name = cp->symbol_at(name_index);
+ Symbol* sig = cp->symbol_at(sig_index);
+ Method* m = outer_ik->find_method(name, sig);
+ if (m != NULL) {
+ Symbol* gsig = m->generic_signature();
+ if (gsig != NULL) {
+ MethodDescriptor* gms = MethodDescriptor::parse_generic_signature(gsig, outer_spec);
+ spec->set_outer_method(gms);
+ }
+ } else if (VerifyGenericSignatures) {
+ ResourceMark rm;
+ stringStream ss;
+ ss.print("Could not find method %s %s in class %s",
+ name->as_C_string(), sig->as_C_string(), outer_name->as_C_string());
+ fatal(ss.as_string());
+ }
+ }
+ }
+
+ spec->bind_variables_to_parameters();
+ return spec;
+}
+
+ClassDescriptor* ClassDescriptor::placeholder(InstanceKlass* klass) {
+ GrowableArray<TypeParameter*> formals;
+ GrowableArray<ClassType*> interfaces;
+ ClassType* super_type = NULL;
+
+ Klass* super_klass = klass->super();
+ if (super_klass != NULL) {
+ InstanceKlass* super = InstanceKlass::cast(super_klass);
+ super_type = ClassType::from_symbol(super->name());
+ }
+
+ for (int i = 0; i < klass->local_interfaces()->length(); ++i) {
+ InstanceKlass* iface = InstanceKlass::cast(klass->local_interfaces()->at(i));
+ interfaces.append(ClassType::from_symbol(iface->name()));
+ }
+ return new ClassDescriptor(formals, super_type, interfaces);
+}
+
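+// Parses a raw class generic-signature Symbol. For example (illustrative),
+// the signature of 'class Foo<T> implements Comparable<T>',
+//   "<T:Ljava/lang/Object;>Ljava/lang/Object;Ljava/lang/Comparable<TT;>;"
+// yields one TypeParameter (T), a ClassType superclass (java/lang/Object) and
+// one ClassType interface (java/lang/Comparable with a type-variable argument).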
+ClassDescriptor* ClassDescriptor::parse_generic_signature(Symbol* sym) {
+
+ DescriptorStream ds(sym);
+ DescriptorStream* STREAM = &ds;
+
+ GrowableArray<TypeParameter*> parameters(8);
+ char c = READ();
+ if (c == '<') {
+ c = READ();
+ while (c != '>') {
+ PUSH(c);
+ TypeParameter* ftp = TypeParameter::parse_generic_signature(CHECK_STREAM);
+ parameters.append(ftp);
+ c = READ();
+ }
+ } else {
+ PUSH(c);
+ }
+
+ EXPECT('L');
+ ClassType* super = ClassType::parse_generic_signature(CHECK_STREAM);
+
+ GrowableArray<ClassType*> signatures(2);
+ while (!STREAM->at_end()) {
+ EXPECT('L');
+ ClassType* iface = ClassType::parse_generic_signature(CHECK_STREAM);
+ signatures.append(iface);
+ }
+
+ EXPECT_END();
+
+ return new ClassDescriptor(parameters, super, signatures);
+}
+
+#ifndef PRODUCT
+void ClassDescriptor::print_on(outputStream* str) const {
+ str->indent().print_cr("ClassDescriptor {");
+ {
+ streamIndentor si(str);
+ if (_type_parameters.length() > 0) {
+ str->indent().print_cr("Formals {");
+ {
+ streamIndentor si(str);
+ for (int i = 0; i < _type_parameters.length(); ++i) {
+ _type_parameters.at(i)->print_on(str);
+ }
+ }
+ str->indent().print_cr("}");
+ }
+ if (_super != NULL) {
+ str->indent().print_cr("Superclass: ");
+ {
+ streamIndentor si(str);
+ _super->print_on(str);
+ }
+ }
+ if (_interfaces.length() > 0) {
+ str->indent().print_cr("SuperInterfaces: {");
+ {
+ streamIndentor si(str);
+ for (int i = 0; i < _interfaces.length(); ++i) {
+ _interfaces.at(i)->print_on(str);
+ }
+ }
+ str->indent().print_cr("}");
+ }
+ if (_outer_method != NULL) {
+ str->indent().print_cr("Outer Method: {");
+ {
+ streamIndentor si(str);
+ _outer_method->print_on(str);
+ }
+ str->indent().print_cr("}");
+ }
+ if (_outer_class != NULL) {
+ str->indent().print_cr("Outer Class: {");
+ {
+ streamIndentor si(str);
+ _outer_class->print_on(str);
+ }
+ str->indent().print_cr("}");
+ }
+ }
+ str->indent().print_cr("}");
+}
+#endif // ndef PRODUCT
+
+ClassType* ClassDescriptor::interface_desc(Symbol* sym) {
+ for (int i = 0; i < _interfaces.length(); ++i) {
+ if (_interfaces.at(i)->identifier()->equals(sym)) {
+ return _interfaces.at(i);
+ }
+ }
+ if (VerifyGenericSignatures) {
+ fatal("Did not find expected interface");
+ }
+ return NULL;
+}
+
+void ClassDescriptor::bind_variables_to_parameters() {
+ if (_outer_class != NULL) {
+ _outer_class->bind_variables_to_parameters();
+ }
+ if (_outer_method != NULL) {
+ _outer_method->bind_variables_to_parameters();
+ }
+ for (int i = 0; i < _type_parameters.length(); ++i) {
+ _type_parameters.at(i)->bind_variables_to_parameters(this, i);
+ }
+ if (_super != NULL) {
+ _super->bind_variables_to_parameters(this);
+ }
+ for (int i = 0; i < _interfaces.length(); ++i) {
+ _interfaces.at(i)->bind_variables_to_parameters(this);
+ }
+}
+
+ClassDescriptor* ClassDescriptor::canonicalize(Context* ctx) {
+
+ GrowableArray<TypeParameter*> type_params(_type_parameters.length());
+ for (int i = 0; i < _type_parameters.length(); ++i) {
+ type_params.append(_type_parameters.at(i)->canonicalize(ctx, 0));
+ }
+
+ ClassDescriptor* outer = _outer_class == NULL ? NULL :
+ _outer_class->canonicalize(ctx);
+
+ ClassType* super = _super == NULL ? NULL : _super->canonicalize(ctx, 0);
+
+ GrowableArray<ClassType*> interfaces(_interfaces.length());
+ for (int i = 0; i < _interfaces.length(); ++i) {
+ interfaces.append(_interfaces.at(i)->canonicalize(ctx, 0));
+ }
+
+ MethodDescriptor* md = _outer_method == NULL ? NULL :
+ _outer_method->canonicalize(ctx);
+
+ return new ClassDescriptor(type_params, super, interfaces, outer, md);
+}
+
+u2 ClassDescriptor::get_outer_class_index(InstanceKlass* klass, TRAPS) {
+ int inner_index = InstanceKlass::inner_class_inner_class_info_offset;
+ int outer_index = InstanceKlass::inner_class_outer_class_info_offset;
+ int name_offset = InstanceKlass::inner_class_inner_name_offset;
+ int next_offset = InstanceKlass::inner_class_next_offset;
+
+ if (klass->inner_classes() == NULL || klass->inner_classes()->length() == 0) {
+ // No inner class info => no declaring class
+ return 0;
+ }
+
+ Array<u2>* i_icls = klass->inner_classes();
+ ConstantPool* i_cp = klass->constants();
+ int i_length = i_icls->length();
+
+ // Find inner_klass attribute
+ for (int i = 0; i + next_offset < i_length; i += next_offset) {
+ u2 ioff = i_icls->at(i + inner_index);
+ u2 ooff = i_icls->at(i + outer_index);
+ u2 noff = i_icls->at(i + name_offset);
+ if (ioff != 0) {
+ // Check to see if the name matches the class we're looking for
+ // before attempting to find the class.
+ if (i_cp->klass_name_at_matches(klass, ioff) && ooff != 0) {
+ return ooff;
+ }
+ }
+ }
+
+ // It may be anonymous; try for that.
+ u2 encl_method_class_idx = klass->enclosing_method_class_index();
+ if (encl_method_class_idx != 0) {
+ return encl_method_class_idx;
+ }
+
+ return 0;
+}
+
+MethodDescriptor* MethodDescriptor::parse_generic_signature(Method* m, ClassDescriptor* outer) {
+ Symbol* generic_sig = m->generic_signature();
+ MethodDescriptor* md = NULL;
+ if (generic_sig == NULL || (md = parse_generic_signature(generic_sig, outer)) == NULL) {
+ md = parse_generic_signature(m->signature(), outer);
+ }
+ assert(md != NULL, "Could not parse method signature");
+ md->bind_variables_to_parameters();
+ return md;
+}
+
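+// Parses a raw method generic-signature Symbol. For example (illustrative),
+//   "<U:Ljava/lang/Object;>(TU;Ljava/util/List<TU;>;)TU;^Ljava/io/IOException;"
+// yields one TypeParameter (U), two parameter Types, a type-variable return
+// type, and a single entry in the throws list.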
+MethodDescriptor* MethodDescriptor::parse_generic_signature(Symbol* sym, ClassDescriptor* outer) {
+
+ DescriptorStream ds(sym);
+ DescriptorStream* STREAM = &ds;
+
+ GrowableArray<TypeParameter*> params(8);
+ char c = READ();
+ if (c == '<') {
+ c = READ();
+ while (c != '>') {
+ PUSH(c);
+ TypeParameter* ftp = TypeParameter::parse_generic_signature(CHECK_STREAM);
+ params.append(ftp);
+ c = READ();
+ }
+ } else {
+ PUSH(c);
+ }
+
+ EXPECT('(');
+
+ GrowableArray<Type*> parameters(8);
+ c = READ();
+ while (c != ')') {
+ PUSH(c);
+ Type* arg = Type::parse_generic_signature(CHECK_STREAM);
+ parameters.append(arg);
+ c = READ();
+ }
+
+ Type* rt = Type::parse_generic_signature(CHECK_STREAM);
+
+ GrowableArray<Type*> throws;
+ while (!STREAM->at_end()) {
+ EXPECT('^');
+ Type* spec = Type::parse_generic_signature(CHECK_STREAM);
+ throws.append(spec);
+ }
+
+ return new MethodDescriptor(params, outer, parameters, rt, throws);
+}
+
+void MethodDescriptor::bind_variables_to_parameters() {
+ for (int i = 0; i < _type_parameters.length(); ++i) {
+ _type_parameters.at(i)->bind_variables_to_parameters(this, i);
+ }
+ for (int i = 0; i < _parameters.length(); ++i) {
+ _parameters.at(i)->bind_variables_to_parameters(this);
+ }
+ _return_type->bind_variables_to_parameters(this);
+ for (int i = 0; i < _throws.length(); ++i) {
+ _throws.at(i)->bind_variables_to_parameters(this);
+ }
+}
+
+bool MethodDescriptor::covariant_match(MethodDescriptor* other, Context* ctx) {
+
+ if (_parameters.length() == other->_parameters.length()) {
+ for (int i = 0; i < _parameters.length(); ++i) {
+ if (!_parameters.at(i)->covariant_match(other->_parameters.at(i), ctx)) {
+ return false;
+ }
+ }
+
+ if (_return_type->as_primitive() != NULL) {
+ return _return_type->covariant_match(other->_return_type, ctx);
+ } else {
+ // return type is a reference
+ return other->_return_type->as_class() != NULL ||
+ other->_return_type->as_variable() != NULL ||
+ other->_return_type->as_array() != NULL;
+ }
+ } else {
+ return false;
+ }
+}
+
+MethodDescriptor* MethodDescriptor::canonicalize(Context* ctx) {
+
+ GrowableArray<TypeParameter*> type_params(_type_parameters.length());
+ for (int i = 0; i < _type_parameters.length(); ++i) {
+ type_params.append(_type_parameters.at(i)->canonicalize(ctx, 0));
+ }
+
+ ClassDescriptor* outer = _outer_class == NULL ? NULL :
+ _outer_class->canonicalize(ctx);
+
+ GrowableArray<Type*> params(_parameters.length());
+ for (int i = 0; i < _parameters.length(); ++i) {
+ params.append(_parameters.at(i)->canonicalize(ctx, 0));
+ }
+
+ Type* rt = _return_type->canonicalize(ctx, 0);
+
+ GrowableArray<Type*> throws(_throws.length());
+ for (int i = 0; i < _throws.length(); ++i) {
+ throws.append(_throws.at(i)->canonicalize(ctx, 0));
+ }
+
+ return new MethodDescriptor(type_params, outer, params, rt, throws);
+}
+
+#ifndef PRODUCT
+TempNewSymbol MethodDescriptor::reify_signature(Context* ctx, TRAPS) {
+ stringStream ss(256);
+
+ ss.print("(");
+ for (int i = 0; i < _parameters.length(); ++i) {
+ _parameters.at(i)->reify_signature(&ss, ctx);
+ }
+ ss.print(")");
+ _return_type->reify_signature(&ss, ctx);
+ return SymbolTable::new_symbol(ss.base(), (int)ss.size(), THREAD);
+}
+
+void MethodDescriptor::print_on(outputStream* str) const {
+ str->indent().print_cr("MethodDescriptor {");
+ {
+ streamIndentor si(str);
+ if (_type_parameters.length() > 0) {
+ str->indent().print_cr("Formals: {");
+ {
+ streamIndentor si(str);
+ for (int i = 0; i < _type_parameters.length(); ++i) {
+ _type_parameters.at(i)->print_on(str);
+ }
+ }
+ str->indent().print_cr("}");
+ }
+ str->indent().print_cr("Parameters: {");
+ {
+ streamIndentor si(str);
+ for (int i = 0; i < _parameters.length(); ++i) {
+ _parameters.at(i)->print_on(str);
+ }
+ }
+ str->indent().print_cr("}");
+ str->indent().print_cr("Return Type: ");
+ {
+ streamIndentor si(str);
+ _return_type->print_on(str);
+ }
+
+ if (_throws.length() > 0) {
+ str->indent().print_cr("Throws: {");
+ {
+ streamIndentor si(str);
+ for (int i = 0; i < _throws.length(); ++i) {
+ _throws.at(i)->print_on(str);
+ }
+ }
+ str->indent().print_cr("}");
+ }
+ }
+ str->indent().print_cr("}");
+}
+#endif // ndef PRODUCT
+
+TypeParameter* TypeParameter::parse_generic_signature(DescriptorStream* STREAM) {
+ STREAM->set_mark();
+ char c = READ();
+ while (c != ':') {
+ c = READ();
+ }
+
+ Identifier* id = STREAM->identifier_from_mark();
+
+ ClassType* class_bound = NULL;
+ GrowableArray<ClassType*> interface_bounds(8);
+
+ c = READ();
+ if (c != '>') {
+ if (c != ':') {
+ EXPECTED(c, 'L');
+ class_bound = ClassType::parse_generic_signature(CHECK_STREAM);
+ c = READ();
+ }
+
+ while (c == ':') {
+ EXPECT('L');
+ ClassType* fts = ClassType::parse_generic_signature(CHECK_STREAM);
+ interface_bounds.append(fts);
+ c = READ();
+ }
+ }
+ PUSH(c);
+
+ return new TypeParameter(id, class_bound, interface_bounds);
+}
+
+void TypeParameter::bind_variables_to_parameters(Descriptor* sig, int position) {
+ if (_class_bound != NULL) {
+ _class_bound->bind_variables_to_parameters(sig);
+ }
+ for (int i = 0; i < _interface_bounds.length(); ++i) {
+ _interface_bounds.at(i)->bind_variables_to_parameters(sig);
+ }
+ _position = position;
+}
+
+Type* TypeParameter::resolve(
+ Context* ctx, int inner_depth, int ctx_depth) {
+
+ if (inner_depth == -1) {
+ // This indicates that the parameter is a method type parameter, which
+    // isn't resolvable using the class hierarchy context
+ return bound();
+ }
+
+ ClassType* provider = ctx->at_depth(ctx_depth);
+ if (provider != NULL) {
+ for (int i = 0; i < inner_depth && provider != NULL; ++i) {
+ provider = provider->outer_class();
+ }
+ if (provider != NULL) {
+ TypeArgument* arg = provider->type_argument_at(_position);
+ if (arg != NULL) {
+ Type* value = arg->lower_bound();
+ return value->canonicalize(ctx, ctx_depth + 1);
+ }
+ }
+ }
+
+ return bound();
+}
+
+TypeParameter* TypeParameter::canonicalize(Context* ctx, int ctx_depth) {
+ ClassType* bound = _class_bound == NULL ? NULL :
+ _class_bound->canonicalize(ctx, ctx_depth);
+
+ GrowableArray<ClassType*> ifaces(_interface_bounds.length());
+ for (int i = 0; i < _interface_bounds.length(); ++i) {
+ ifaces.append(_interface_bounds.at(i)->canonicalize(ctx, ctx_depth));
+ }
+
+ TypeParameter* ret = new TypeParameter(_identifier, bound, ifaces);
+ ret->_position = _position;
+ return ret;
+}
+
+ClassType* TypeParameter::bound() {
+ if (_class_bound != NULL) {
+ return _class_bound;
+ }
+
+ if (_interface_bounds.length() == 1) {
+ return _interface_bounds.at(0);
+ }
+
+ return ClassType::java_lang_Object(); // TODO: investigate this case
+}
+
+#ifndef PRODUCT
+void TypeParameter::print_on(outputStream* str) const {
+ str->indent().print_cr("Formal: {");
+ {
+ streamIndentor si(str);
+
+ str->indent().print("Identifier: ");
+ _identifier->print_on(str);
+ str->print_cr("");
+ if (_class_bound != NULL) {
+ str->indent().print_cr("Class Bound: ");
+ streamIndentor si(str);
+ _class_bound->print_on(str);
+ }
+ if (_interface_bounds.length() > 0) {
+ str->indent().print_cr("Interface Bounds: {");
+ {
+ streamIndentor si(str);
+ for (int i = 0; i < _interface_bounds.length(); ++i) {
+ _interface_bounds.at(i)->print_on(str);
+ }
+ }
+ str->indent().print_cr("}");
+ }
+ str->indent().print_cr("Ordinal Position: %d", _position);
+ }
+ str->indent().print_cr("}");
+}
+#endif // ndef PRODUCT
+
+Type* Type::parse_generic_signature(DescriptorStream* STREAM) {
+ char c = READ();
+ switch (c) {
+ case 'L':
+ return ClassType::parse_generic_signature(CHECK_STREAM);
+ case 'T':
+ return TypeVariable::parse_generic_signature(CHECK_STREAM);
+ case '[':
+ return ArrayType::parse_generic_signature(CHECK_STREAM);
+ default:
+ return new PrimitiveType(c);
+ }
+}
+
+Identifier* ClassType::parse_generic_signature_simple(GrowableArray<TypeArgument*>* args,
+ bool* has_inner, DescriptorStream* STREAM) {
+ STREAM->set_mark();
+
+ char c = READ();
+ while (c != ';' && c != '.' && c != '<') { c = READ(); }
+ Identifier* id = STREAM->identifier_from_mark();
+
+ if (c == '<') {
+ c = READ();
+ while (c != '>') {
+ PUSH(c);
+ TypeArgument* arg = TypeArgument::parse_generic_signature(CHECK_STREAM);
+ args->append(arg);
+ c = READ();
+ }
+ c = READ();
+ }
+
+ *has_inner = (c == '.');
+ if (!(*has_inner)) {
+ EXPECTED(c, ';');
+ }
+
+ return id;
+}
+
+ClassType* ClassType::parse_generic_signature(DescriptorStream* STREAM) {
+ return parse_generic_signature(NULL, CHECK_STREAM);
+}
+
+ClassType* ClassType::parse_generic_signature(ClassType* outer, DescriptorStream* STREAM) {
+ GrowableArray<TypeArgument*> args;
+ ClassType* gct = NULL;
+ bool has_inner = false;
+
+ Identifier* id = parse_generic_signature_simple(&args, &has_inner, STREAM);
+ if (id != NULL) {
+ gct = new ClassType(id, args, outer);
+
+ if (has_inner) {
+ gct = parse_generic_signature(gct, CHECK_STREAM);
+ }
+ }
+ return gct;
+}
+
+ClassType* ClassType::from_symbol(Symbol* sym) {
+ assert(sym != NULL, "Must not be null");
+ GrowableArray<TypeArgument*> args;
+ Identifier* id = new Identifier(sym, 0, sym->utf8_length());
+ return new ClassType(id, args, NULL);
+}
+
+ClassType* ClassType::java_lang_Object() {
+ return from_symbol(vmSymbols::java_lang_Object());
+}
+
+void ClassType::bind_variables_to_parameters(Descriptor* sig) {
+ for (int i = 0; i < _type_arguments.length(); ++i) {
+ _type_arguments.at(i)->bind_variables_to_parameters(sig);
+ }
+ if (_outer_class != NULL) {
+ _outer_class->bind_variables_to_parameters(sig);
+ }
+}
+
+TypeArgument* ClassType::type_argument_at(int i) {
+ if (i >= 0 && i < _type_arguments.length()) {
+ return _type_arguments.at(i);
+ } else {
+ return NULL;
+ }
+}
+
+#ifndef PRODUCT
+void ClassType::reify_signature(stringStream* ss, Context* ctx) {
+ ss->print("L");
+ _identifier->print_on(ss);
+ ss->print(";");
+}
+
+void ClassType::print_on(outputStream* str) const {
+ str->indent().print_cr("Class {");
+ {
+ streamIndentor si(str);
+ str->indent().print("Name: ");
+ _identifier->print_on(str);
+ str->print_cr("");
+ if (_type_arguments.length() != 0) {
+ str->indent().print_cr("Type Arguments: {");
+ {
+ streamIndentor si(str);
+ for (int j = 0; j < _type_arguments.length(); ++j) {
+ _type_arguments.at(j)->print_on(str);
+ }
+ }
+ str->indent().print_cr("}");
+ }
+ if (_outer_class != NULL) {
+ str->indent().print_cr("Outer Class: ");
+ streamIndentor sir(str);
+ _outer_class->print_on(str);
+ }
+ }
+ str->indent().print_cr("}");
+}
+#endif // ndef PRODUCT
+
+bool ClassType::covariant_match(Type* other, Context* ctx) {
+
+ if (other == this) {
+ return true;
+ }
+
+ TypeVariable* variable = other->as_variable();
+ if (variable != NULL) {
+ other = variable->resolve(ctx, 0);
+ }
+
+ ClassType* outer = outer_class();
+ ClassType* other_class = other->as_class();
+
+ if (other_class == NULL ||
+ (outer == NULL) != (other_class->outer_class() == NULL)) {
+ return false;
+ }
+
+ if (!_identifier->equals(other_class->_identifier)) {
+ return false;
+ }
+
+ if (outer != NULL && !outer->covariant_match(other_class->outer_class(), ctx)) {
+ return false;
+ }
+
+ return true;
+}
+
+ClassType* ClassType::canonicalize(Context* ctx, int ctx_depth) {
+
+ GrowableArray<TypeArgument*> args(_type_arguments.length());
+ for (int i = 0; i < _type_arguments.length(); ++i) {
+ args.append(_type_arguments.at(i)->canonicalize(ctx, ctx_depth));
+ }
+
+ ClassType* outer = _outer_class == NULL ? NULL :
+ _outer_class->canonicalize(ctx, ctx_depth);
+
+ return new ClassType(_identifier, args, outer);
+}
+
+TypeVariable* TypeVariable::parse_generic_signature(DescriptorStream* STREAM) {
+ STREAM->set_mark();
+ char c = READ();
+ while (c != ';') {
+ c = READ();
+ }
+ Identifier* id = STREAM->identifier_from_mark();
+
+ return new TypeVariable(id);
+}
+
+void TypeVariable::bind_variables_to_parameters(Descriptor* sig) {
+ _parameter = sig->find_type_parameter(_id, &_inner_depth);
+ if (VerifyGenericSignatures && _parameter == NULL) {
+ fatal("Could not find formal parameter");
+ }
+}
+
+Type* TypeVariable::resolve(Context* ctx, int ctx_depth) {
+ if (parameter() != NULL) {
+ return parameter()->resolve(ctx, inner_depth(), ctx_depth);
+ } else {
+ if (VerifyGenericSignatures) {
+ fatal("Type variable matches no parameter");
+ }
+ return NULL;
+ }
+}
+
+bool TypeVariable::covariant_match(Type* other, Context* ctx) {
+
+ if (other == this) {
+ return true;
+ }
+
+ Context my_context(NULL); // empty, results in erasure
+ Type* my_type = resolve(&my_context, 0);
+ if (my_type == NULL) {
+ return false;
+ }
+
+ return my_type->covariant_match(other, ctx);
+}
+
+Type* TypeVariable::canonicalize(Context* ctx, int ctx_depth) {
+ return resolve(ctx, ctx_depth);
+}
+
+#ifndef PRODUCT
+void TypeVariable::reify_signature(stringStream* ss, Context* ctx) {
+ Type* type = resolve(ctx, 0);
+ if (type != NULL) {
+ type->reify_signature(ss, ctx);
+ }
+}
+
+void TypeVariable::print_on(outputStream* str) const {
+ str->indent().print_cr("Type Variable {");
+ {
+ streamIndentor si(str);
+ str->indent().print("Name: ");
+ _id->print_on(str);
+ str->print_cr("");
+ str->indent().print_cr("Inner depth: %d", _inner_depth);
+ }
+ str->indent().print_cr("}");
+}
+#endif // ndef PRODUCT
+
+ArrayType* ArrayType::parse_generic_signature(DescriptorStream* STREAM) {
+ Type* base = Type::parse_generic_signature(CHECK_STREAM);
+ return new ArrayType(base);
+}
+
+void ArrayType::bind_variables_to_parameters(Descriptor* sig) {
+ assert(_base != NULL, "Invalid base");
+ _base->bind_variables_to_parameters(sig);
+}
+
+bool ArrayType::covariant_match(Type* other, Context* ctx) {
+ assert(_base != NULL, "Invalid base");
+
+ if (other == this) {
+ return true;
+ }
+
+ ArrayType* other_array = other->as_array();
+ return (other_array != NULL && _base->covariant_match(other_array->_base, ctx));
+}
+
+ArrayType* ArrayType::canonicalize(Context* ctx, int ctx_depth) {
+ assert(_base != NULL, "Invalid base");
+ return new ArrayType(_base->canonicalize(ctx, ctx_depth));
+}
+
+#ifndef PRODUCT
+void ArrayType::reify_signature(stringStream* ss, Context* ctx) {
+ assert(_base != NULL, "Invalid base");
+ ss->print("[");
+ _base->reify_signature(ss, ctx);
+}
+
+void ArrayType::print_on(outputStream* str) const {
+ str->indent().print_cr("Array {");
+ {
+ streamIndentor si(str);
+ _base->print_on(str);
+ }
+ str->indent().print_cr("}");
+}
+#endif // ndef PRODUCT
+
+bool PrimitiveType::covariant_match(Type* other, Context* ctx) {
+
+ PrimitiveType* other_prim = other->as_primitive();
+ return (other_prim != NULL && _type == other_prim->_type);
+}
+
+PrimitiveType* PrimitiveType::canonicalize(Context* ctx, int ctxd) {
+ return this;
+}
+
+#ifndef PRODUCT
+void PrimitiveType::reify_signature(stringStream* ss, Context* ctx) {
+ ss->print("%c", _type);
+}
+
+void PrimitiveType::print_on(outputStream* str) const {
+ str->indent().print_cr("Primitive: '%c'", _type);
+}
+#endif // ndef PRODUCT
+
+void PrimitiveType::bind_variables_to_parameters(Descriptor* sig) {
+}
+
+TypeArgument* TypeArgument::parse_generic_signature(DescriptorStream* STREAM) {
+ char c = READ();
+ Type* type = NULL;
+
+ switch (c) {
+ case '*':
+ return new TypeArgument(ClassType::java_lang_Object(), NULL);
+ break;
+ default:
+ PUSH(c);
+ // fall-through
+ case '+':
+ case '-':
+ type = Type::parse_generic_signature(CHECK_STREAM);
+ if (c == '+') {
+ return new TypeArgument(type, NULL);
+ } else if (c == '-') {
+ return new TypeArgument(ClassType::java_lang_Object(), type);
+ } else {
+ return new TypeArgument(type, type);
+ }
+ }
+}
+
+void TypeArgument::bind_variables_to_parameters(Descriptor* sig) {
+ assert(_lower_bound != NULL, "Invalid lower bound");
+ _lower_bound->bind_variables_to_parameters(sig);
+ if (_upper_bound != NULL && _upper_bound != _lower_bound) {
+ _upper_bound->bind_variables_to_parameters(sig);
+ }
+}
+
+bool TypeArgument::covariant_match(TypeArgument* other, Context* ctx) {
+ assert(_lower_bound != NULL, "Invalid lower bound");
+
+ if (other == this) {
+ return true;
+ }
+
+ if (!_lower_bound->covariant_match(other->lower_bound(), ctx)) {
+ return false;
+ }
+ return true;
+}
+
+TypeArgument* TypeArgument::canonicalize(Context* ctx, int ctx_depth) {
+ assert(_lower_bound != NULL, "Invalid lower bound");
+ Type* lower = _lower_bound->canonicalize(ctx, ctx_depth);
+ Type* upper = NULL;
+
+ if (_upper_bound == _lower_bound) {
+ upper = lower;
+ } else if (_upper_bound != NULL) {
+ upper = _upper_bound->canonicalize(ctx, ctx_depth);
+ }
+
+ return new TypeArgument(lower, upper);
+}
+
+#ifndef PRODUCT
+void TypeArgument::print_on(outputStream* str) const {
+ str->indent().print_cr("TypeArgument {");
+ {
+ streamIndentor si(str);
+ if (_lower_bound != NULL) {
+ str->indent().print("Lower bound: ");
+ _lower_bound->print_on(str);
+ }
+ if (_upper_bound != NULL) {
+ str->indent().print("Upper bound: ");
+ _upper_bound->print_on(str);
+ }
+ }
+ str->indent().print_cr("}");
+}
+#endif // ndef PRODUCT
+
+void Context::Mark::destroy() {
+ if (is_active()) {
+ _context->reset_to_mark(_marked_size);
+ }
+ deactivate();
+}
+
+void Context::apply_type_arguments(
+ InstanceKlass* current, InstanceKlass* super, TRAPS) {
+ assert(_cache != NULL, "Cannot use an empty context");
+ ClassType* spec = NULL;
+ if (current != NULL) {
+ ClassDescriptor* descriptor = _cache->descriptor_for(current, CHECK);
+ if (super == current->super()) {
+ spec = descriptor->super();
+ } else {
+ spec = descriptor->interface_desc(super->name());
+ }
+ if (spec != NULL) {
+ _type_arguments.push(spec);
+ }
+ }
+}
+
+void Context::reset_to_mark(int size) {
+ _type_arguments.trunc_to(size);
+}
+
+ClassType* Context::at_depth(int i) const {
+ if (i < _type_arguments.length()) {
+ return _type_arguments.at(_type_arguments.length() - 1 - i);
+ }
+ return NULL;
+}
+
+#ifndef PRODUCT
+void Context::print_on(outputStream* str) const {
+ str->indent().print_cr("Context {");
+ for (int i = 0; i < _type_arguments.length(); ++i) {
+ streamIndentor si(str);
+    str->indent().print("level %d: ", i);
+ ClassType* ct = at_depth(i);
+ if (ct == NULL) {
+ str->print_cr("<empty>");
+ continue;
+ } else {
+ str->print_cr("{");
+ }
+
+ for (int j = 0; j < ct->type_arguments_length(); ++j) {
+ streamIndentor si(str);
+ TypeArgument* ta = ct->type_argument_at(j);
+ Type* bound = ta->lower_bound();
+ bound->print_on(str);
+ }
+ str->indent().print_cr("}");
+ }
+ str->indent().print_cr("}");
+}
+#endif // ndef PRODUCT
+
+ClassDescriptor* DescriptorCache::descriptor_for(InstanceKlass* ik, TRAPS) {
+
+ ClassDescriptor** existing = _class_descriptors.get(ik);
+ if (existing == NULL) {
+ ClassDescriptor* cd = ClassDescriptor::parse_generic_signature(ik, CHECK_NULL);
+ _class_descriptors.put(ik, cd);
+ return cd;
+ } else {
+ return *existing;
+ }
+}
+
+MethodDescriptor* DescriptorCache::descriptor_for(
+ Method* mh, ClassDescriptor* cd, TRAPS) {
+ assert(mh != NULL && cd != NULL, "Should not be NULL");
+ MethodDescriptor** existing = _method_descriptors.get(mh);
+ if (existing == NULL) {
+ MethodDescriptor* md = MethodDescriptor::parse_generic_signature(mh, cd);
+ _method_descriptors.put(mh, md);
+ return md;
+ } else {
+ return *existing;
+ }
+}
+MethodDescriptor* DescriptorCache::descriptor_for(Method* mh, TRAPS) {
+ ClassDescriptor* cd = descriptor_for(
+ InstanceKlass::cast(mh->method_holder()), CHECK_NULL);
+ return descriptor_for(mh, cd, THREAD);
+}
+
+} // namespace generic
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/classfile/genericSignatures.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -0,0 +1,467 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_VM_CLASSFILE_GENERICSIGNATURES_HPP
+#define SHARE_VM_CLASSFILE_GENERICSIGNATURES_HPP
+
+#include "classfile/symbolTable.hpp"
+#include "memory/allocation.hpp"
+#include "runtime/signature.hpp"
+#include "utilities/growableArray.hpp"
+#include "utilities/resourceHash.hpp"
+
+class stringStream;
+
+namespace generic {
+
+class Identifier;
+class ClassDescriptor;
+class MethodDescriptor;
+
+class TypeParameter; // a formal type parameter declared in generic signatures
+class TypeArgument; // The "type value" passed to fill parameters in supertypes
+class TypeVariable; // A usage of a type parameter as a value
+/**
+ * Example:
+ *
+ * <T, V> class Foo extends Bar<String> { int m(V v) {} }
+ * ^^^^^^ ^^^^^^ ^^
+ * type parameters type argument type variable
+ *
+ * Note that a type variable could be passed as an argument too:
+ * <T, V> class Foo extends Bar<T> { int m(V v) {} }
+ * ^^^
+ * type argument's value is a type variable
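+ *
+ * Purely illustrative (assuming javac's usual encoding): for the first
+ * example above, the class-file generic signature strings would be
+ *   class Foo:
+ *     <T:Ljava/lang/Object;V:Ljava/lang/Object;>LBar<Ljava/lang/String;>;
+ *   method m:  (TV;)I
+ * These are the kinds of strings that the parse_generic_signature methods
+ * below consume.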
+ */
+
+
+class Type;
+class ClassType;
+class ArrayType;
+class PrimitiveType;
+class Context;
+class DescriptorCache;
+
+class DescriptorStream;
+
+class Identifier : public ResourceObj {
+ private:
+ Symbol* _sym;
+ int _begin;
+ int _end;
+
+ public:
+ Identifier(Symbol* sym, int begin, int end) :
+ _sym(sym), _begin(begin), _end(end) {}
+
+ bool equals(Identifier* other);
+ bool equals(Symbol* sym);
+
+#ifndef PRODUCT
+ void print_on(outputStream* str) const;
+#endif // ndef PRODUCT
+};
+
+class Descriptor : public ResourceObj {
+ protected:
+ GrowableArray<TypeParameter*> _type_parameters;
+ ClassDescriptor* _outer_class;
+
+ Descriptor(GrowableArray<TypeParameter*>& params,
+ ClassDescriptor* outer)
+ : _type_parameters(params), _outer_class(outer) {}
+
+ public:
+
+ ClassDescriptor* outer_class() { return _outer_class; }
+ void set_outer_class(ClassDescriptor* sig) { _outer_class = sig; }
+
+ virtual ClassDescriptor* as_class_signature() { return NULL; }
+ virtual MethodDescriptor* as_method_signature() { return NULL; }
+
+ bool is_class_signature() { return as_class_signature() != NULL; }
+ bool is_method_signature() { return as_method_signature() != NULL; }
+
+ GrowableArray<TypeParameter*>& type_parameters() {
+ return _type_parameters;
+ }
+
+ TypeParameter* find_type_parameter(Identifier* id, int* param_depth);
+
+ virtual void bind_variables_to_parameters() = 0;
+
+#ifndef PRODUCT
+ virtual void print_on(outputStream* str) const = 0;
+#endif
+};
+
+class ClassDescriptor : public Descriptor {
+ private:
+ ClassType* _super;
+ GrowableArray<ClassType*> _interfaces;
+ MethodDescriptor* _outer_method;
+
+ ClassDescriptor(GrowableArray<TypeParameter*>& ftp, ClassType* scs,
+ GrowableArray<ClassType*>& sis, ClassDescriptor* outer_class = NULL,
+ MethodDescriptor* outer_method = NULL)
+ : Descriptor(ftp, outer_class), _super(scs), _interfaces(sis),
+ _outer_method(outer_method) {}
+
+ static u2 get_outer_class_index(InstanceKlass* k, TRAPS);
+ static ClassDescriptor* parse_generic_signature(Klass* k, Symbol* original_name, TRAPS);
+
+ public:
+
+ virtual ClassDescriptor* as_class_signature() { return this; }
+
+ MethodDescriptor* outer_method() { return _outer_method; }
+ void set_outer_method(MethodDescriptor* m) { _outer_method = m; }
+
+ ClassType* super() { return _super; }
+ ClassType* interface_desc(Symbol* sym);
+
+ static ClassDescriptor* parse_generic_signature(Klass* k, TRAPS);
+ static ClassDescriptor* parse_generic_signature(Symbol* sym);
+
+  // For use in superclass chains in positions where there is no generic info
+ static ClassDescriptor* placeholder(InstanceKlass* klass);
+
+#ifndef PRODUCT
+ void print_on(outputStream* str) const;
+#endif
+
+ ClassDescriptor* canonicalize(Context* ctx);
+
+ // Linking sets the position index in any contained TypeVariable type
+ // to correspond to the location of that identifier in the formal type
+ // parameters.
+ void bind_variables_to_parameters();
+};
+
+class MethodDescriptor : public Descriptor {
+ private:
+ GrowableArray<Type*> _parameters;
+ Type* _return_type;
+ GrowableArray<Type*> _throws;
+
+ MethodDescriptor(GrowableArray<TypeParameter*>& ftp, ClassDescriptor* outer,
+ GrowableArray<Type*>& sigs, Type* rt, GrowableArray<Type*>& throws)
+ : Descriptor(ftp, outer), _parameters(sigs), _return_type(rt),
+ _throws(throws) {}
+
+ public:
+
+ static MethodDescriptor* parse_generic_signature(Method* m, ClassDescriptor* outer);
+ static MethodDescriptor* parse_generic_signature(Symbol* sym, ClassDescriptor* outer);
+
+ MethodDescriptor* as_method_signature() { return this; }
+
+ // Performs generic analysis on the method parameters to determine
+ // if both methods refer to the same argument types.
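+  // (All parameter types must match; a primitive return type must match
+  // exactly, while for a reference return type any reference return type
+  // on 'other' is accepted.)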
+ bool covariant_match(MethodDescriptor* other, Context* ctx);
+
+ // Returns a new method descriptor with all generic variables
+ // removed and replaced with whatever is indicated using the Context.
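+  // For instance, if the Context maps the declaring class's T to String,
+  // 'void m(T)' canonicalizes to the equivalent of 'void m(String)'.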
+ MethodDescriptor* canonicalize(Context* ctx);
+
+ void bind_variables_to_parameters();
+
+#ifndef PRODUCT
+ TempNewSymbol reify_signature(Context* ctx, TRAPS);
+ void print_on(outputStream* str) const;
+#endif
+};
+
+class TypeParameter : public ResourceObj {
+ private:
+ Identifier* _identifier;
+ ClassType* _class_bound;
+ GrowableArray<ClassType*> _interface_bounds;
+
+ // The position is the ordinal location of the parameter within the
+ // formal parameter list (excluding outer classes). It is only set for
+ // formal type parameters that are associated with a class -- method
+ // type parameters are left as -1. When resolving a generic variable to
+ // find the actual type, this index is used to access the generic type
+ // argument in the provided context object.
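+  // For example, in the Foo declaration illustrated above, T is assigned
+  // position 0 and V position 1.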
+ int _position; // Assigned during variable linking
+
+ TypeParameter(Identifier* id, ClassType* class_bound,
+ GrowableArray<ClassType*>& interface_bounds) :
+ _identifier(id), _class_bound(class_bound),
+ _interface_bounds(interface_bounds), _position(-1) {}
+
+ public:
+ static TypeParameter* parse_generic_signature(DescriptorStream* str);
+
+ ClassType* bound();
+ int position() { return _position; }
+
+ void bind_variables_to_parameters(Descriptor* sig, int position);
+ Identifier* identifier() { return _identifier; }
+
+ Type* resolve(Context* ctx, int inner_depth, int ctx_depth);
+ TypeParameter* canonicalize(Context* ctx, int ctx_depth);
+
+#ifndef PRODUCT
+ void print_on(outputStream* str) const;
+#endif
+};
+
+class Type : public ResourceObj {
+ public:
+ static Type* parse_generic_signature(DescriptorStream* str);
+
+ virtual ClassType* as_class() { return NULL; }
+ virtual TypeVariable* as_variable() { return NULL; }
+ virtual ArrayType* as_array() { return NULL; }
+ virtual PrimitiveType* as_primitive() { return NULL; }
+
+ virtual bool covariant_match(Type* gt, Context* ctx) = 0;
+ virtual Type* canonicalize(Context* ctx, int ctx_depth) = 0;
+
+ virtual void bind_variables_to_parameters(Descriptor* sig) = 0;
+
+#ifndef PRODUCT
+ virtual void reify_signature(stringStream* ss, Context* ctx) = 0;
+ virtual void print_on(outputStream* str) const = 0;
+#endif
+};
+
+class ClassType : public Type {
+ friend class ClassDescriptor;
+ protected:
+ Identifier* _identifier;
+ GrowableArray<TypeArgument*> _type_arguments;
+ ClassType* _outer_class;
+
+ ClassType(Identifier* identifier,
+ GrowableArray<TypeArgument*>& args,
+ ClassType* outer)
+ : _identifier(identifier), _type_arguments(args), _outer_class(outer) {}
+
+  // Parses the class name and any type arguments; sets *has_inner to true
+  // if there are inner classes still to be read
+ static Identifier* parse_generic_signature_simple(
+ GrowableArray<TypeArgument*>* args,
+ bool* has_inner, DescriptorStream* str);
+
+ static ClassType* parse_generic_signature(ClassType* outer,
+ DescriptorStream* str);
+ static ClassType* from_symbol(Symbol* sym);
+
+ public:
+ ClassType* as_class() { return this; }
+
+ static ClassType* parse_generic_signature(DescriptorStream* str);
+ static ClassType* java_lang_Object();
+
+ Identifier* identifier() { return _identifier; }
+ int type_arguments_length() { return _type_arguments.length(); }
+ TypeArgument* type_argument_at(int i);
+
+ virtual ClassType* outer_class() { return _outer_class; }
+
+ bool covariant_match(Type* gt, Context* ctx);
+ ClassType* canonicalize(Context* ctx, int context_depth);
+
+ void bind_variables_to_parameters(Descriptor* sig);
+
+#ifndef PRODUCT
+ void reify_signature(stringStream* ss, Context* ctx);
+ void print_on(outputStream* str) const;
+#endif
+};
+
+class TypeVariable : public Type {
+ private:
+ Identifier* _id;
+ TypeParameter* _parameter; // assigned during linking
+
+ // how many steps "out" from inner classes, -1 if method
+ int _inner_depth;
+
+ TypeVariable(Identifier* id)
+ : _id(id), _parameter(NULL), _inner_depth(0) {}
+
+ public:
+ TypeVariable* as_variable() { return this; }
+
+ static TypeVariable* parse_generic_signature(DescriptorStream* str);
+
+ Identifier* identifier() { return _id; }
+ TypeParameter* parameter() { return _parameter; }
+ int inner_depth() { return _inner_depth; }
+
+ void bind_variables_to_parameters(Descriptor* sig);
+
+ Type* resolve(Context* ctx, int ctx_depth);
+ bool covariant_match(Type* gt, Context* ctx);
+ Type* canonicalize(Context* ctx, int ctx_depth);
+
+#ifndef PRODUCT
+ void reify_signature(stringStream* ss, Context* ctx);
+ void print_on(outputStream* str) const;
+#endif
+};
+
+class ArrayType : public Type {
+ private:
+ Type* _base;
+
+ ArrayType(Type* base) : _base(base) {}
+
+ public:
+ ArrayType* as_array() { return this; }
+
+ static ArrayType* parse_generic_signature(DescriptorStream* str);
+
+ bool covariant_match(Type* gt, Context* ctx);
+ ArrayType* canonicalize(Context* ctx, int ctx_depth);
+
+ void bind_variables_to_parameters(Descriptor* sig);
+
+#ifndef PRODUCT
+ void reify_signature(stringStream* ss, Context* ctx);
+ void print_on(outputStream* str) const;
+#endif
+};
+
+class PrimitiveType : public Type {
+ friend class Type;
+ private:
+ char _type; // includes V for void
+
+ PrimitiveType(char& type) : _type(type) {}
+
+ public:
+ PrimitiveType* as_primitive() { return this; }
+
+ bool covariant_match(Type* gt, Context* ctx);
+ PrimitiveType* canonicalize(Context* ctx, int ctx_depth);
+
+ void bind_variables_to_parameters(Descriptor* sig);
+
+#ifndef PRODUCT
+ void reify_signature(stringStream* ss, Context* ctx);
+ void print_on(outputStream* str) const;
+#endif
+};
+
+class TypeArgument : public ResourceObj {
+ private:
+ Type* _lower_bound;
+ Type* _upper_bound; // may be null or == _lower_bound
+
+ TypeArgument(Type* lower_bound, Type* upper_bound)
+ : _lower_bound(lower_bound), _upper_bound(upper_bound) {}
+
+ public:
+
+ static TypeArgument* parse_generic_signature(DescriptorStream* str);
+
+ Type* lower_bound() { return _lower_bound; }
+ Type* upper_bound() { return _upper_bound; }
+
+ void bind_variables_to_parameters(Descriptor* sig);
+ TypeArgument* canonicalize(Context* ctx, int ctx_depth);
+
+ bool covariant_match(TypeArgument* a, Context* ctx);
+
+#ifndef PRODUCT
+ void print_on(outputStream* str) const;
+#endif
+};
+
+
+class Context : public ResourceObj {
+ private:
+ DescriptorCache* _cache;
+ GrowableArray<ClassType*> _type_arguments;
+
+ void reset_to_mark(int size);
+
+ public:
+  // When this object goes out of scope or 'destroy' is called, any type
+  // arguments applied to the context after the mark was taken are wound
+  // back (unless the mark has been deactivated).
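+  //
+  // A typical use is sketched below (the caller-side names are
+  // hypothetical):
+  //   Context::Mark mark = ctx->mark();
+  //   ctx->apply_type_arguments(current_ik, super_ik, CHECK);
+  //   ... // inspect ctx->at_depth(0), etc.
+  //   // leaving the scope destroys 'mark' and truncates ctx again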
+ class Mark : public StackObj {
+ private:
+ mutable Context* _context;
+ int _marked_size;
+
+ bool is_active() const { return _context != NULL; }
+ void deactivate() const { _context = NULL; }
+
+ public:
+ Mark() : _context(NULL), _marked_size(0) {}
+ Mark(Context* ctx, int sz) : _context(ctx), _marked_size(sz) {}
+ Mark(const Mark& m) : _context(m._context), _marked_size(m._marked_size) {
+ m.deactivate(); // Ownership is transferred
+ }
+
+ Mark& operator=(const Mark& cm) {
+ destroy();
+ _context = cm._context;
+ _marked_size = cm._marked_size;
+ cm.deactivate();
+ return *this;
+ }
+
+ void destroy();
+ ~Mark() { destroy(); }
+ };
+
+ Context(DescriptorCache* cache) : _cache(cache) {}
+
+ Mark mark() { return Mark(this, _type_arguments.length()); }
+ void apply_type_arguments(InstanceKlass* current, InstanceKlass* super,TRAPS);
+
+ ClassType* at_depth(int i) const;
+
+#ifndef PRODUCT
+ void print_on(outputStream* str) const;
+#endif
+};
+
+/**
+ * Contains a cache of descriptors for classes and methods so they can be
+ * looked up instead of reparsed each time they are needed.
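+ *
+ * Sketch of intended use (the caller-side names are hypothetical):
+ *   DescriptorCache cache;
+ *   MethodDescriptor* md = cache.descriptor_for(method, CHECK_NULL);
+ *   // a later call with the same Method* returns the cached descriptor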
+ */
+class DescriptorCache : public ResourceObj {
+ private:
+ ResourceHashtable<InstanceKlass*, ClassDescriptor*> _class_descriptors;
+ ResourceHashtable<Method*, MethodDescriptor*> _method_descriptors;
+
+ public:
+ ClassDescriptor* descriptor_for(InstanceKlass* ikh, TRAPS);
+
+ MethodDescriptor* descriptor_for(Method* mh, ClassDescriptor* cd, TRAPS);
+ // Class descriptor derived from method holder
+ MethodDescriptor* descriptor_for(Method* mh, TRAPS);
+};
+
+} // namespace generic
+
+#endif // SHARE_VM_CLASSFILE_GENERICSIGNATURES_HPP
+
--- a/hotspot/src/share/vm/classfile/javaClasses.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/classfile/javaClasses.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -1156,7 +1156,7 @@
// Print stack trace element to resource allocated buffer
char* java_lang_Throwable::print_stack_element_to_buffer(Method* method, int bci) {
// Get strings and string lengths
- InstanceKlass* klass = InstanceKlass::cast(method->method_holder());
+ InstanceKlass* klass = method->method_holder();
const char* klass_name = klass->external_name();
int buf_len = (int)strlen(klass_name);
char* source_file_name;
@@ -1747,14 +1747,14 @@
Handle element = ik->allocate_instance_handle(CHECK_0);
// Fill in class name
ResourceMark rm(THREAD);
- const char* str = InstanceKlass::cast(method->method_holder())->external_name();
+ const char* str = method->method_holder()->external_name();
oop classname = StringTable::intern((char*) str, CHECK_0);
java_lang_StackTraceElement::set_declaringClass(element(), classname);
// Fill in method name
oop methodname = StringTable::intern(method->name(), CHECK_0);
java_lang_StackTraceElement::set_methodName(element(), methodname);
// Fill in source file name
- Symbol* source = InstanceKlass::cast(method->method_holder())->source_file_name();
+ Symbol* source = method->method_holder()->source_file_name();
if (ShowHiddenFrames && source == NULL)
source = vmSymbols::unknown_class_name();
oop filename = StringTable::intern(source, CHECK_0);
--- a/hotspot/src/share/vm/classfile/systemDictionary.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/classfile/systemDictionary.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -137,6 +137,7 @@
/* NOTE: needed too early in bootstrapping process to have checks based on JDK version */ \
/* Universe::is_gte_jdk14x_version() is not set up by this point. */ \
/* It's okay if this turns out to be NULL in non-1.4 JDKs. */ \
+ do_klass(lambda_MagicLambdaImpl_klass, java_lang_invoke_MagicLambdaImpl, Opt ) \
do_klass(reflect_MagicAccessorImpl_klass, sun_reflect_MagicAccessorImpl, Opt ) \
do_klass(reflect_MethodAccessorImpl_klass, sun_reflect_MethodAccessorImpl, Opt_Only_JDK14NewRef) \
do_klass(reflect_ConstructorAccessorImpl_klass, sun_reflect_ConstructorAccessorImpl, Opt_Only_JDK14NewRef) \
--- a/hotspot/src/share/vm/classfile/verifier.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/classfile/verifier.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -446,7 +446,7 @@
bytecode_name = "<illegal>";
}
}
- InstanceKlass* ik = InstanceKlass::cast(method->method_holder());
+ InstanceKlass* ik = method->method_holder();
ss->indent().print_cr("Location:");
streamIndentor si2(ss);
ss->indent().print_cr("%s.%s%s @%d: %s",
@@ -555,9 +555,10 @@
if (was_recursively_verified()) return;
Method* m = methods->at(index);
- if (m->is_native() || m->is_abstract()) {
+ if (m->is_native() || m->is_abstract() || m->is_overpass()) {
// If m is native or abstract, skip it. It is checked in class file
- // parser that methods do not override a final method.
+ // parser that methods do not override a final method. Overpass methods
+ // are trusted since the VM generates them.
continue;
}
verify_method(methodHandle(THREAD, m), CHECK_VERIFY(this));
@@ -1849,7 +1850,7 @@
if ((index <= 0) || (index >= nconstants)) {
verify_error(ErrorContext::bad_cp_index(bci, index),
"Illegal constant pool index %d in class %s",
- index, InstanceKlass::cast(cp->pool_holder())->external_name());
+ index, cp->pool_holder()->external_name());
return;
}
}
@@ -1868,7 +1869,7 @@
if ((types & (1 << tag)) == 0) {
verify_error(ErrorContext::bad_cp_index(bci, index),
"Illegal type at constant pool entry %d in class %s",
- index, InstanceKlass::cast(cp->pool_holder())->external_name());
+ index, cp->pool_holder()->external_name());
return;
}
}
@@ -1880,7 +1881,7 @@
if (!tag.is_klass() && !tag.is_unresolved_klass()) {
verify_error(ErrorContext::bad_cp_index(bci, index),
"Illegal type at constant pool entry %d in class %s",
- index, InstanceKlass::cast(cp->pool_holder())->external_name());
+ index, cp->pool_holder()->external_name());
return;
}
}
@@ -2304,11 +2305,21 @@
// Make sure the constant pool item is the right type
u2 index = bcs->get_index_u2();
Bytecodes::Code opcode = bcs->raw_code();
- unsigned int types = (opcode == Bytecodes::_invokeinterface
- ? 1 << JVM_CONSTANT_InterfaceMethodref
- : opcode == Bytecodes::_invokedynamic
- ? 1 << JVM_CONSTANT_InvokeDynamic
- : 1 << JVM_CONSTANT_Methodref);
+ unsigned int types;
+ switch (opcode) {
+ case Bytecodes::_invokeinterface:
+ types = 1 << JVM_CONSTANT_InterfaceMethodref;
+ break;
+ case Bytecodes::_invokedynamic:
+ types = 1 << JVM_CONSTANT_InvokeDynamic;
+ break;
+ case Bytecodes::_invokespecial:
+ types = (1 << JVM_CONSTANT_InterfaceMethodref) |
+ (1 << JVM_CONSTANT_Methodref);
+ break;
+ default:
+ types = 1 << JVM_CONSTANT_Methodref;
+ }
verify_cp_type(bcs->bci(), index, cp, types, CHECK_VERIFY(this));
// Get method name and signature
--- a/hotspot/src/share/vm/classfile/vmSymbols.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/classfile/vmSymbols.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -507,7 +507,7 @@
}
void vmIntrinsics::verify_method(ID actual_id, Method* m) {
- Symbol* mk = Klass::cast(m->method_holder())->name();
+ Symbol* mk = m->method_holder()->name();
ID declared_id = match_method_with_klass(m, mk);
if (declared_id == actual_id) return; // success
--- a/hotspot/src/share/vm/classfile/vmSymbols.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/classfile/vmSymbols.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -259,6 +259,7 @@
template(java_lang_invoke_DontInline_signature, "Ljava/lang/invoke/DontInline;") \
template(java_lang_invoke_LambdaForm_Compiled_signature, "Ljava/lang/invoke/LambdaForm$Compiled;") \
template(java_lang_invoke_LambdaForm_Hidden_signature, "Ljava/lang/invoke/LambdaForm$Hidden;") \
+ template(java_lang_invoke_MagicLambdaImpl, "java/lang/invoke/MagicLambdaImpl") \
/* internal up-calls made only by the JVM, via class sun.invoke.MethodHandleNatives: */ \
template(findMethodHandleType_name, "findMethodHandleType") \
template(findMethodHandleType_signature, "(Ljava/lang/Class;[Ljava/lang/Class;)Ljava/lang/invoke/MethodType;") \
--- a/hotspot/src/share/vm/code/compiledIC.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/code/compiledIC.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -191,8 +191,8 @@
int index = klassItable::compute_itable_index(call_info->resolved_method()());
entry = VtableStubs::create_stub(false, index, method());
assert(entry != NULL, "entry not computed");
- Klass* k = call_info->resolved_method()->method_holder();
- assert(Klass::cast(k)->is_interface(), "sanity check");
+ InstanceKlass* k = call_info->resolved_method()->method_holder();
+ assert(k->is_interface(), "sanity check");
InlineCacheBuffer::create_transition_stub(this, k, entry);
} else {
// Can be different than method->vtable_index(), due to package-private etc.
--- a/hotspot/src/share/vm/code/dependencies.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/code/dependencies.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -829,7 +829,7 @@
}
if ( !Dependencies::is_concrete_method(lm)
&& !Dependencies::is_concrete_method(m)
- && Klass::cast(lm->method_holder())->is_subtype_of(m->method_holder()))
+ && lm->method_holder()->is_subtype_of(m->method_holder()))
// Method m is overridden by lm, but both are non-concrete.
return true;
}
@@ -1160,7 +1160,11 @@
// We could also return false if m does not yet appear to be
// executed, if the VM version supports this distinction also.
- return !m->is_abstract();
+ return !m->is_abstract() &&
+ !InstanceKlass::cast(m->method_holder())->is_interface();
+ // TODO: investigate whether default methods should be
+ // considered as "concrete" in this situation. For now they
+ // are not.
}
--- a/hotspot/src/share/vm/code/nmethod.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/code/nmethod.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -1263,7 +1263,7 @@
assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
// Remove from list of active nmethods
if (method() != NULL)
- InstanceKlass::cast(method()->method_holder())->remove_osr_nmethod(this);
+ method()->method_holder()->remove_osr_nmethod(this);
// Set entry as invalid
_entry_bci = InvalidOSREntryBci;
}
--- a/hotspot/src/share/vm/compiler/compileBroker.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/compiler/compileBroker.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -1051,7 +1051,7 @@
guarantee(!method->is_abstract(), "cannot compile abstract methods");
assert(method->method_holder()->oop_is_instance(),
"sanity check");
- assert(!InstanceKlass::cast(method->method_holder())->is_not_initialized(),
+ assert(!method->method_holder()->is_not_initialized(),
"method holder must be initialized");
assert(!method->is_method_handle_intrinsic(), "do not enqueue these guys");
@@ -1206,7 +1206,7 @@
assert(method->method_holder()->oop_is_instance(), "not an instance method");
assert(osr_bci == InvocationEntryBci || (0 <= osr_bci && osr_bci < method->code_size()), "bci out of range");
assert(!method->is_abstract() && (osr_bci == InvocationEntryBci || !method->is_native()), "cannot compile abstract/native methods");
- assert(!InstanceKlass::cast(method->method_holder())->is_not_initialized(), "method holder must be initialized");
+ assert(!method->method_holder()->is_not_initialized(), "method holder must be initialized");
if (!TieredCompilation) {
comp_level = CompLevel_highest_tier;
--- a/hotspot/src/share/vm/compiler/compilerOracle.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/compiler/compilerOracle.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -67,7 +67,7 @@
// utility method
MethodMatcher* find(methodHandle method) {
- Symbol* class_name = Klass::cast(method->method_holder())->name();
+ Symbol* class_name = method->method_holder()->name();
Symbol* method_name = method->name();
for (MethodMatcher* current = this; current != NULL; current = current->_next) {
if (match(class_name, current->class_name(), current->_class_mode) &&
@@ -624,7 +624,7 @@
assert(has_command_file(), "command file must be specified");
fileStream stream(fopen(cc_file(), "at"));
stream.print("exclude ");
- Klass::cast(method->method_holder())->name()->print_symbol_on(&stream);
+ method->method_holder()->name()->print_symbol_on(&stream);
stream.print(".");
method->name()->print_symbol_on(&stream);
method->signature()->print_symbol_on(&stream);
--- a/hotspot/src/share/vm/compiler/disassembler.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/compiler/disassembler.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -55,16 +55,18 @@
bool Disassembler::_tried_to_load_library = false;
// This routine is in the shared library:
+Disassembler::decode_func_virtual Disassembler::_decode_instructions_virtual = NULL;
Disassembler::decode_func Disassembler::_decode_instructions = NULL;
static const char hsdis_library_name[] = "hsdis-"HOTSPOT_LIB_ARCH;
-static const char decode_instructions_name[] = "decode_instructions_virtual";
-
+static const char decode_instructions_virtual_name[] = "decode_instructions_virtual";
+static const char decode_instructions_name[] = "decode_instructions";
+static bool use_new_version = true;
#define COMMENT_COLUMN 40 LP64_ONLY(+8) /*could be an option*/
#define BYTES_COMMENT ";..." /* funky byte display comment */
bool Disassembler::load_library() {
- if (_decode_instructions != NULL) {
+ if (_decode_instructions_virtual != NULL || _decode_instructions != NULL) {
// Already succeeded.
return true;
}
@@ -123,11 +125,19 @@
_library = os::dll_load(buf, ebuf, sizeof ebuf);
}
if (_library != NULL) {
+ _decode_instructions_virtual = CAST_TO_FN_PTR(Disassembler::decode_func_virtual,
+ os::dll_lookup(_library, decode_instructions_virtual_name));
+ }
+ if (_decode_instructions_virtual == NULL) {
+    // the new entry point was not found; fall back to the old version
_decode_instructions = CAST_TO_FN_PTR(Disassembler::decode_func,
os::dll_lookup(_library, decode_instructions_name));
+ use_new_version = false;
+ } else {
+ use_new_version = true;
}
_tried_to_load_library = true;
- if (_decode_instructions == NULL) {
+ if (_decode_instructions_virtual == NULL && _decode_instructions == NULL) {
tty->print_cr("Could not load %s; %s; %s", buf,
((_library != NULL)
? "entry point is missing"
@@ -450,17 +460,31 @@
// This is mainly for debugging the library itself.
FILE* out = stdout;
FILE* xmlout = (_print_raw > 1 ? out : NULL);
- return (address)
- (*Disassembler::_decode_instructions)((uintptr_t)start, (uintptr_t)end,
- start, end - start,
+ return use_new_version ?
+ (address)
+ (*Disassembler::_decode_instructions_virtual)((uintptr_t)start, (uintptr_t)end,
+ start, end - start,
+ NULL, (void*) xmlout,
+ NULL, (void*) out,
+ options(), 0/*nice new line*/)
+ :
+ (address)
+ (*Disassembler::_decode_instructions)(start, end,
NULL, (void*) xmlout,
NULL, (void*) out,
options());
}
- return (address)
- (*Disassembler::_decode_instructions)((uintptr_t)start, (uintptr_t)end,
- start, end - start,
+ return use_new_version ?
+ (address)
+ (*Disassembler::_decode_instructions_virtual)((uintptr_t)start, (uintptr_t)end,
+ start, end - start,
+ &event_to_env, (void*) this,
+ &printf_to_env, (void*) this,
+ options(), 0/*nice new line*/)
+ :
+ (address)
+ (*Disassembler::_decode_instructions)(start, end,
&event_to_env, (void*) this,
&printf_to_env, (void*) this,
options());
--- a/hotspot/src/share/vm/compiler/disassembler.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/compiler/disassembler.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -49,18 +49,27 @@
friend class decode_env;
private:
// this is the type of the dll entry point:
- typedef void* (*decode_func)(uintptr_t start_va, uintptr_t end_va,
+ typedef void* (*decode_func_virtual)(uintptr_t start_va, uintptr_t end_va,
unsigned char* buffer, uintptr_t length,
void* (*event_callback)(void*, const char*, void*),
void* event_stream,
int (*printf_callback)(void*, const char*, ...),
void* printf_stream,
+ const char* options,
+ int newline);
+ // this is the type of the dll entry point for old version:
+ typedef void* (*decode_func)(void* start_va, void* end_va,
+ void* (*event_callback)(void*, const char*, void*),
+ void* event_stream,
+ int (*printf_callback)(void*, const char*, ...),
+ void* printf_stream,
const char* options);
// points to the library.
static void* _library;
// bailout
static bool _tried_to_load_library;
// points to the decode function.
+ static decode_func_virtual _decode_instructions_virtual;
static decode_func _decode_instructions;
// tries to load library and return whether it succedded.
static bool load_library();
@@ -85,7 +94,9 @@
public:
static bool can_decode() {
- return (_decode_instructions != NULL) || load_library();
+ return (_decode_instructions_virtual != NULL) ||
+ (_decode_instructions != NULL) ||
+ load_library();
}
static void decode(CodeBlob *cb, outputStream* st = NULL);
static void decode(nmethod* nm, outputStream* st = NULL);
--- a/hotspot/src/share/vm/interpreter/interpreterRuntime.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/interpreter/interpreterRuntime.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -733,12 +733,7 @@
get_index_u2_cpcache(thread, bytecode), bytecode, CHECK);
} // end JvmtiHideSingleStepping
- cache_entry(thread)->set_method_handle(
- pool,
- info.resolved_method(),
- info.resolved_appendix(),
- info.resolved_method_type(),
- pool->resolved_references());
+ cache_entry(thread)->set_method_handle(pool, info);
}
IRT_END
@@ -762,12 +757,7 @@
} // end JvmtiHideSingleStepping
ConstantPoolCacheEntry* cp_cache_entry = pool->invokedynamic_cp_cache_entry_at(index);
- cp_cache_entry->set_dynamic_call(
- pool,
- info.resolved_method(),
- info.resolved_appendix(),
- info.resolved_method_type(),
- pool->resolved_references());
+ cp_cache_entry->set_dynamic_call(pool, info);
}
IRT_END
--- a/hotspot/src/share/vm/interpreter/linkResolver.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/interpreter/linkResolver.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -23,6 +23,7 @@
*/
#include "precompiled.hpp"
+#include "classfile/defaultMethods.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "compiler/compileBroker.hpp"
@@ -132,7 +133,7 @@
// don't force compilation, resolve was on behalf of compiler
return;
}
- if (InstanceKlass::cast(selected_method->method_holder())->is_not_initialized()) {
+ if (selected_method->method_holder()->is_not_initialized()) {
// 'is_not_initialized' means not only '!is_initialized', but also that
// initialization has not been started yet ('!being_initialized')
// Do not force compilation of methods in uninitialized classes.
@@ -404,21 +405,13 @@
Symbol* method_name, Symbol* method_signature,
KlassHandle current_klass, bool check_access, TRAPS) {
- // 1. check if klass is not interface
- if (resolved_klass->is_interface()) {
- ResourceMark rm(THREAD);
- char buf[200];
- jio_snprintf(buf, sizeof(buf), "Found interface %s, but class was expected", Klass::cast(resolved_klass())->external_name());
- THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(), buf);
- }
-
Handle nested_exception;
- // 2. lookup method in resolved klass and its super klasses
+ // 1. lookup method in resolved klass and its super klasses
lookup_method_in_klasses(resolved_method, resolved_klass, method_name, method_signature, CHECK);
if (resolved_method.is_null()) { // not found in the class hierarchy
- // 3. lookup method in all the interfaces implemented by the resolved klass
+ // 2. lookup method in all the interfaces implemented by the resolved klass
lookup_method_in_interfaces(resolved_method, resolved_klass, method_name, method_signature, CHECK);
if (resolved_method.is_null()) {
@@ -432,7 +425,7 @@
}
if (resolved_method.is_null()) {
- // 4. method lookup failed
+ // 3. method lookup failed
ResourceMark rm(THREAD);
THROW_MSG_CAUSE(vmSymbols::java_lang_NoSuchMethodError(),
Method::name_and_sig_as_C_string(Klass::cast(resolved_klass()),
@@ -442,6 +435,15 @@
}
}
+ // 4. check if klass is not interface
+ if (resolved_klass->is_interface() && resolved_method->is_abstract()) {
+ ResourceMark rm(THREAD);
+ char buf[200];
+ jio_snprintf(buf, sizeof(buf), "Found interface %s, but class was expected",
+ resolved_klass()->external_name());
+ THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(), buf);
+ }
+
// 5. check if method is concrete
if (resolved_method->is_abstract() && !resolved_klass->is_abstract()) {
ResourceMark rm(THREAD);
@@ -464,7 +466,7 @@
// check loader constraints
Handle loader (THREAD, InstanceKlass::cast(current_klass())->class_loader());
- Handle class_loader (THREAD, InstanceKlass::cast(resolved_method->method_holder())->class_loader());
+ Handle class_loader (THREAD, resolved_method->method_holder()->class_loader());
{
ResourceMark rm(THREAD);
char* failed_type_name =
@@ -526,7 +528,7 @@
if (check_access) {
HandleMark hm(THREAD);
Handle loader (THREAD, InstanceKlass::cast(current_klass())->class_loader());
- Handle class_loader (THREAD, InstanceKlass::cast(resolved_method->method_holder())->class_loader());
+ Handle class_loader (THREAD, resolved_method->method_holder()->class_loader());
{
ResourceMark rm(THREAD);
char* failed_type_name =
@@ -743,6 +745,27 @@
Symbol* method_name, Symbol* method_signature,
KlassHandle current_klass, bool check_access, TRAPS) {
+ if (resolved_klass->is_interface() && current_klass() != NULL) {
+ // If the target class is a direct interface, treat this as a "super"
+ // default call.
+ //
+ // If the current method is an overpass that happens to call a direct
+ // super-interface's method, then we'll end up rerunning the default method
+ // analysis even though we don't need to, but that's ok since it will end
+ // up with the same answer.
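+    // (This is the path taken for an 'Interface.super.method()' call made
+    // from a class that lists Interface among its direct superinterfaces.)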
+ InstanceKlass* ik = InstanceKlass::cast(current_klass());
+ Array<Klass*>* interfaces = ik->local_interfaces();
+ int num_interfaces = interfaces->length();
+ for (int index = 0; index < num_interfaces; index++) {
+ if (interfaces->at(index) == resolved_klass()) {
+ Method* method = DefaultMethods::find_super_default(current_klass(),
+ resolved_klass(), method_name, method_signature, CHECK);
+ resolved_method = methodHandle(THREAD, method);
+ return;
+ }
+ }
+ }
+
resolve_method(resolved_method, resolved_klass, method_name, method_signature, current_klass, check_access, CHECK);
// check if method name is <init>, that it is found in same klass as static type
@@ -784,11 +807,17 @@
{ KlassHandle method_klass = KlassHandle(THREAD,
resolved_method->method_holder());
- if (check_access &&
+ const bool direct_calling_default_method =
+ resolved_klass() != NULL && resolved_method() != NULL &&
+ resolved_klass->is_interface() && !resolved_method->is_abstract();
+
+ if (!direct_calling_default_method &&
+ check_access &&
// a) check if ACC_SUPER flag is set for the current class
current_klass->is_super() &&
// b) check if the method class is a superclass of the current class (superclass relation is not reflexive!)
- current_klass->is_subtype_of(method_klass()) && current_klass() != method_klass() &&
+ current_klass->is_subtype_of(method_klass()) &&
+ current_klass() != method_klass() &&
// c) check if the method is not <init>
resolved_method->name() != vmSymbols::object_initializer_name()) {
// Lookup super method
@@ -881,12 +910,12 @@
// Virtual methods cannot be resolved before its klass has been linked, for otherwise the Method*'s
// has not been rewritten, and the vtable initialized.
- assert(InstanceKlass::cast(resolved_method->method_holder())->is_linked(), "must be linked");
+ assert(resolved_method->method_holder()->is_linked(), "must be linked");
// Virtual methods cannot be resolved before its klass has been linked, for otherwise the Method*'s
// has not been rewritten, and the vtable initialized. Make sure to do this after the nullcheck, since
// a missing receiver might result in a bogus lookup.
- assert(InstanceKlass::cast(resolved_method->method_holder())->is_linked(), "must be linked");
+ assert(resolved_method->method_holder()->is_linked(), "must be linked");
// do lookup based on receiver klass using the vtable index
if (resolved_method->method_holder()->is_interface()) { // miranda method
--- a/hotspot/src/share/vm/oops/constMethod.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/constMethod.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -34,29 +34,30 @@
const u2 ConstMethod::UNSET_IDNUM = 0xFFFF;
ConstMethod* ConstMethod::allocate(ClassLoaderData* loader_data,
- int byte_code_size,
- int compressed_line_number_size,
- int localvariable_table_length,
- int exception_table_length,
- int checked_exceptions_length,
- TRAPS) {
+ int byte_code_size,
+ int compressed_line_number_size,
+ int localvariable_table_length,
+ int exception_table_length,
+ int checked_exceptions_length,
+ MethodType method_type,
+ TRAPS) {
int size = ConstMethod::size(byte_code_size,
compressed_line_number_size,
localvariable_table_length,
exception_table_length,
checked_exceptions_length);
return new (loader_data, size, true, THREAD) ConstMethod(
- byte_code_size, compressed_line_number_size,
- localvariable_table_length, exception_table_length,
- checked_exceptions_length, size);
+ byte_code_size, compressed_line_number_size, localvariable_table_length,
+ exception_table_length, checked_exceptions_length, method_type, size);
}
ConstMethod::ConstMethod(int byte_code_size,
- int compressed_line_number_size,
- int localvariable_table_length,
- int exception_table_length,
- int checked_exceptions_length,
- int size) {
+ int compressed_line_number_size,
+ int localvariable_table_length,
+ int exception_table_length,
+ int checked_exceptions_length,
+ MethodType method_type,
+ int size) {
No_Safepoint_Verifier no_safepoint;
set_interpreter_kind(Interpreter::invalid);
@@ -69,6 +70,7 @@
compressed_line_number_size,
localvariable_table_length,
exception_table_length);
+ set_method_type(method_type);
assert(this->size() == size, "wrong size for object");
}
@@ -111,8 +113,7 @@
}
Method* ConstMethod::method() const {
- return InstanceKlass::cast(_constants->pool_holder())->method_with_idnum(
- _method_idnum);
+ return _constants->pool_holder()->method_with_idnum(_method_idnum);
}
// linenumber table - note that length is unknown until decompression,
--- a/hotspot/src/share/vm/oops/constMethod.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/constMethod.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -108,12 +108,17 @@
class ConstMethod : public MetaspaceObj {
friend class VMStructs;
+
+public:
+ typedef enum { NORMAL, OVERPASS } MethodType;
+
private:
enum {
_has_linenumber_table = 1,
_has_checked_exceptions = 2,
_has_localvariable_table = 4,
- _has_exception_table = 8
+ _has_exception_table = 8,
+ _is_overpass = 16
};
// Bit vector of signature
@@ -145,19 +150,22 @@
// Constructor
ConstMethod(int byte_code_size,
- int compressed_line_number_size,
- int localvariable_table_length,
- int exception_table_length,
- int checked_exceptions_length,
- int size);
+ int compressed_line_number_size,
+ int localvariable_table_length,
+ int exception_table_length,
+ int checked_exceptions_length,
+ MethodType is_overpass,
+ int size);
public:
+
static ConstMethod* allocate(ClassLoaderData* loader_data,
- int byte_code_size,
- int compressed_line_number_size,
- int localvariable_table_length,
- int exception_table_length,
- int checked_exceptions_length,
- TRAPS);
+ int byte_code_size,
+ int compressed_line_number_size,
+ int localvariable_table_length,
+ int exception_table_length,
+ int checked_exceptions_length,
+ MethodType mt,
+ TRAPS);
bool is_constMethod() const { return true; }
@@ -179,6 +187,19 @@
bool has_exception_handler() const
{ return (_flags & _has_exception_table) != 0; }
+ MethodType method_type() const {
+ return ((_flags & _is_overpass) == 0) ? NORMAL : OVERPASS;
+ }
+
+ void set_method_type(MethodType mt) {
+ if (mt == NORMAL) {
+ _flags &= ~(_is_overpass);
+ } else {
+ _flags |= _is_overpass;
+ }
+ }
+
+
void set_interpreter_kind(int kind) { _interpreter_kind = kind; }
int interpreter_kind(void) const { return _interpreter_kind; }
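Note on the constMethod.hpp hunk above: the two-valued MethodType is not stored as a separate field; it is folded into one _is_overpass bit of the existing flags word. Below is a self-contained model of that encoding, using hypothetical names rather than the real ConstMethod class.

#include <cassert>

class FlagDemo {
 public:
  typedef enum { NORMAL, OVERPASS } MethodType;

  FlagDemo() : _flags(0) {}

  MethodType method_type() const {
    return ((_flags & _is_overpass) == 0) ? NORMAL : OVERPASS;
  }
  void set_method_type(MethodType mt) {
    if (mt == NORMAL) {
      _flags &= ~_is_overpass;   // clear the bit, leave other flags alone
    } else {
      _flags |= _is_overpass;    // set the bit, leave other flags alone
    }
  }
 private:
  enum { _has_linenumber_table = 1, _is_overpass = 16 };
  unsigned short _flags;
};

int main() {
  FlagDemo d;
  assert(d.method_type() == FlagDemo::NORMAL);
  d.set_method_type(FlagDemo::OVERPASS);
  assert(d.method_type() == FlagDemo::OVERPASS);
  d.set_method_type(FlagDemo::NORMAL);
  assert(d.method_type() == FlagDemo::NORMAL);
  return 0;
}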
--- a/hotspot/src/share/vm/oops/constantPool.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/constantPool.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -228,7 +228,7 @@
} else {
do_resolve = true;
name = this_oop->unresolved_klass_at(which);
- loader = Handle(THREAD, InstanceKlass::cast(this_oop->pool_holder())->class_loader());
+ loader = Handle(THREAD, this_oop->pool_holder()->class_loader());
}
}
} // unlocking constantPool
@@ -247,7 +247,7 @@
if (do_resolve) {
// this_oop must be unlocked during resolve_or_fail
- oop protection_domain = Klass::cast(this_oop->pool_holder())->protection_domain();
+ oop protection_domain = this_oop->pool_holder()->protection_domain();
Handle h_prot (THREAD, protection_domain);
Klass* k_oop = SystemDictionary::resolve_or_fail(name, loader, h_prot, true, THREAD);
KlassHandle k;
@@ -315,7 +315,7 @@
vframeStream vfst(JavaThread::current());
if (!vfst.at_end()) {
line_number = vfst.method()->line_number_from_bci(vfst.bci());
- Symbol* s = InstanceKlass::cast(vfst.method()->method_holder())->source_file_name();
+ Symbol* s = vfst.method()->method_holder()->source_file_name();
if (s != NULL) {
source_file = s->as_C_string();
}
@@ -325,11 +325,11 @@
// only print something if the classes are different
if (source_file != NULL) {
tty->print("RESOLVE %s %s %s:%d\n",
- InstanceKlass::cast(this_oop->pool_holder())->external_name(),
+ this_oop->pool_holder()->external_name(),
InstanceKlass::cast(k())->external_name(), source_file, line_number);
} else {
tty->print("RESOLVE %s %s\n",
- InstanceKlass::cast(this_oop->pool_holder())->external_name(),
+ this_oop->pool_holder()->external_name(),
InstanceKlass::cast(k())->external_name());
}
}
@@ -339,7 +339,7 @@
// Only updated constant pool - if it is resolved.
do_resolve = this_oop->tag_at(which).is_unresolved_klass();
if (do_resolve) {
- ClassLoaderData* this_key = InstanceKlass::cast(this_oop->pool_holder())->class_loader_data();
+ ClassLoaderData* this_key = this_oop->pool_holder()->class_loader_data();
if (!this_key->is_the_null_class_loader_data()) {
this_key->record_dependency(k(), CHECK_NULL); // Can throw OOM
}
@@ -367,8 +367,8 @@
assert(entry.is_unresolved(), "must be either symbol or klass");
Thread *thread = Thread::current();
Symbol* name = entry.get_symbol();
- oop loader = InstanceKlass::cast(this_oop->pool_holder())->class_loader();
- oop protection_domain = Klass::cast(this_oop->pool_holder())->protection_domain();
+ oop loader = this_oop->pool_holder()->class_loader();
+ oop protection_domain = this_oop->pool_holder()->protection_domain();
Handle h_prot (thread, protection_domain);
Handle h_loader (thread, loader);
Klass* k = SystemDictionary::find(name, h_loader, h_prot, thread);
@@ -409,8 +409,8 @@
} else {
assert(entry.is_unresolved(), "must be either symbol or klass");
Symbol* name = entry.get_symbol();
- oop loader = InstanceKlass::cast(this_oop->pool_holder())->class_loader();
- oop protection_domain = Klass::cast(this_oop->pool_holder())->protection_domain();
+ oop loader = this_oop->pool_holder()->class_loader();
+ oop protection_domain = this_oop->pool_holder()->protection_domain();
Handle h_loader(THREAD, loader);
Handle h_prot (THREAD, protection_domain);
KlassHandle k(THREAD, SystemDictionary::find(name, h_loader, h_prot, THREAD));
@@ -1143,16 +1143,21 @@
int from_oplen = operand_array_length(from_cp->operands());
int old_oplen = operand_array_length(to_cp->operands());
if (from_oplen != 0) {
+ ClassLoaderData* loader_data = to_cp->pool_holder()->class_loader_data();
// append my operands to the target's operands array
if (old_oplen == 0) {
- to_cp->set_operands(from_cp->operands()); // reuse; do not merge
+ // Can't just reuse from_cp's operand list because of deallocation issues
+ int len = from_cp->operands()->length();
+ Array<u2>* new_ops = MetadataFactory::new_array<u2>(loader_data, len, CHECK);
+ Copy::conjoint_memory_atomic(
+ from_cp->operands()->adr_at(0), new_ops->adr_at(0), len * sizeof(u2));
+ to_cp->set_operands(new_ops);
} else {
int old_len = to_cp->operands()->length();
int from_len = from_cp->operands()->length();
int old_off = old_oplen * sizeof(u2);
int from_off = from_oplen * sizeof(u2);
// Use the metaspace for the destination constant pool
- ClassLoaderData* loader_data = to_cp->pool_holder()->class_loader_data();
Array<u2>* new_operands = MetadataFactory::new_array<u2>(loader_data, old_len + from_len, CHECK);
int fillp = 0, len = 0;
// first part of dest
@@ -1785,7 +1790,7 @@
assert(cp_patches->at(index).is_null(),
err_msg("Unused constant pool patch at %d in class file %s",
index,
- InstanceKlass::cast(pool_holder())->external_name()));
+ pool_holder()->external_name()));
}
#endif // ASSERT
}
@@ -1943,7 +1948,7 @@
st->print(" for ");
pool_holder()->print_value_on(st);
if (pool_holder() != NULL) {
- bool extra = (InstanceKlass::cast(pool_holder())->constants() != this);
+ bool extra = (pool_holder()->constants() != this);
if (extra) st->print(" (extra)");
}
if (cache() != NULL) {
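Note on the operand-merging hunk in constantPool.cpp above: to_cp no longer aliases from_cp's operand array; it allocates a fresh array in its own class loader's metaspace and copies the contents, so the two pools can later be deallocated independently. The sketch below models only the copy-instead-of-share idea with plain C++ containers (not the metaspace API), and it omits the offset fix-ups the real merge performs.

#include <cassert>
#include <cstdint>
#include <vector>

typedef uint16_t u2;

struct DemoPool {
  std::vector<u2> operands;   // owned by this pool
};

// Give 'to' its own copy so freeing 'from' later cannot leave 'to'
// pointing at deallocated storage.
void append_operands(DemoPool& to, const DemoPool& from) {
  if (from.operands.empty()) return;
  if (to.operands.empty()) {
    to.operands = from.operands;            // deep copy, not an alias
  } else {
    to.operands.insert(to.operands.end(),
                       from.operands.begin(), from.operands.end());
  }
}

int main() {
  DemoPool from, to;
  from.operands.assign(4, 7);
  append_operands(to, from);
  from.operands.clear();                    // "deallocate" the source
  assert(to.operands.size() == 4);          // destination is unaffected
  return 0;
}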
--- a/hotspot/src/share/vm/oops/constantPool.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/constantPool.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -86,8 +86,8 @@
friend class Universe; // For null constructor
private:
Array<u1>* _tags; // the tag array describing the constant pool's contents
- ConstantPoolCache* _cache; // the cache holding interpreter runtime information
- Klass* _pool_holder; // the corresponding class
+ ConstantPoolCache* _cache; // the cache holding interpreter runtime information
+ InstanceKlass* _pool_holder; // the corresponding class
Array<u2>* _operands; // for variable-sized (InvokeDynamic) nodes, usually empty
// Array of resolved objects from the constant pool and map from resolved
@@ -193,9 +193,9 @@
void set_on_stack(const bool value);
// Klass holding pool
- Klass* pool_holder() const { return _pool_holder; }
- void set_pool_holder(Klass* k) { _pool_holder = k; }
- Klass** pool_holder_addr() { return &_pool_holder; }
+ InstanceKlass* pool_holder() const { return _pool_holder; }
+ void set_pool_holder(InstanceKlass* k) { _pool_holder = k; }
+ InstanceKlass** pool_holder_addr() { return &_pool_holder; }
// Interpreter runtime support
ConstantPoolCache* cache() const { return _cache; }
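Note on the constantPool.hpp hunk above: narrowing _pool_holder from Klass* to InstanceKlass* is what lets every call site in this changeset drop its InstanceKlass::cast(...) wrapper. A toy illustration of the pattern follows; the Demo* class names are invented for the example.

#include <cassert>
#include <string>

struct DemoKlass            { virtual ~DemoKlass() {} };
struct DemoInstanceKlass : DemoKlass {
  std::string external_name() const { return "com.example.Foo"; }
};

// Before: the holder was stored as the base type and every caller had to
// downcast. After (as in the hunk above): store the derived type directly.
class DemoConstantPool {
 public:
  explicit DemoConstantPool(DemoInstanceKlass* holder) : _pool_holder(holder) {}
  DemoInstanceKlass* pool_holder() const { return _pool_holder; }
 private:
  DemoInstanceKlass* _pool_holder;
};

int main() {
  DemoInstanceKlass ik;
  DemoConstantPool cp(&ik);
  // No cast needed at the use site.
  assert(cp.pool_holder()->external_name() == "com.example.Foo");
  return 0;
}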
--- a/hotspot/src/share/vm/oops/cpCache.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/cpCache.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -231,8 +231,8 @@
void ConstantPoolCacheEntry::set_interface_call(methodHandle method, int index) {
- Klass* interf = method->method_holder();
- assert(InstanceKlass::cast(interf)->is_interface(), "must be an interface");
+ InstanceKlass* interf = method->method_holder();
+ assert(interf->is_interface(), "must be an interface");
assert(!method->is_final_method(), "interfaces do not have final methods; cannot link to one here");
set_f1(interf);
set_f2(index);
@@ -243,25 +243,17 @@
}
-void ConstantPoolCacheEntry::set_method_handle(constantPoolHandle cpool,
- methodHandle adapter,
- Handle appendix, Handle method_type,
- objArrayHandle resolved_references) {
- set_method_handle_common(cpool, Bytecodes::_invokehandle, adapter, appendix, method_type, resolved_references);
+void ConstantPoolCacheEntry::set_method_handle(constantPoolHandle cpool, const CallInfo &call_info) {
+ set_method_handle_common(cpool, Bytecodes::_invokehandle, call_info);
}
-void ConstantPoolCacheEntry::set_dynamic_call(constantPoolHandle cpool,
- methodHandle adapter,
- Handle appendix, Handle method_type,
- objArrayHandle resolved_references) {
- set_method_handle_common(cpool, Bytecodes::_invokedynamic, adapter, appendix, method_type, resolved_references);
+void ConstantPoolCacheEntry::set_dynamic_call(constantPoolHandle cpool, const CallInfo &call_info) {
+ set_method_handle_common(cpool, Bytecodes::_invokedynamic, call_info);
}
void ConstantPoolCacheEntry::set_method_handle_common(constantPoolHandle cpool,
Bytecodes::Code invoke_code,
- methodHandle adapter,
- Handle appendix, Handle method_type,
- objArrayHandle resolved_references) {
+ const CallInfo &call_info) {
// NOTE: This CPCE can be the subject of data races.
// There are three words to update: flags, refs[f2], f1 (in that order).
// Writers must store all other values before f1.
@@ -276,6 +268,9 @@
return;
}
+ const methodHandle adapter = call_info.resolved_method();
+ const Handle appendix = call_info.resolved_appendix();
+ const Handle method_type = call_info.resolved_method_type();
const bool has_appendix = appendix.not_null();
const bool has_method_type = method_type.not_null();
@@ -315,6 +310,7 @@
// This allows us to create fewer method oops, while keeping type safety.
//
+ objArrayHandle resolved_references = cpool->resolved_references();
// Store appendix, if any.
if (has_appendix) {
const int appendix_index = f2_as_index() + _indy_resolved_references_appendix_offset;
@@ -421,7 +417,7 @@
if (!(*trace_name_printed)) {
// RC_TRACE_MESG macro has an embedded ResourceMark
RC_TRACE_MESG(("adjust: name=%s",
- Klass::cast(old_method->method_holder())->external_name()));
+ old_method->method_holder()->external_name()));
*trace_name_printed = true;
}
// RC_TRACE macro has an embedded ResourceMark
@@ -449,7 +445,7 @@
if (!(*trace_name_printed)) {
// RC_TRACE_MESG macro has an embedded ResourceMark
RC_TRACE_MESG(("adjust: name=%s",
- Klass::cast(old_method->method_holder())->external_name()));
+ old_method->method_holder()->external_name()));
*trace_name_printed = true;
}
// RC_TRACE macro has an embedded ResourceMark
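Note on the cpCache.cpp hunk above: set_method_handle and set_dynamic_call now take a single const CallInfo& instead of four separate resolution products, and set_method_handle_common unpacks what it needs. Below is a minimal sketch of that parameter-object refactor; DemoCallInfo and its fields are invented stand-ins, not the LinkResolver API.

#include <iostream>
#include <string>

// Parameter object bundling the results of call-site resolution.
struct DemoCallInfo {
  std::string resolved_method;
  std::string resolved_appendix;   // may be empty ("no appendix")
  std::string resolved_method_type;
};

// Before: f(method, appendix, method_type, refs). After: f(call_info).
void set_method_handle_common(const DemoCallInfo& call_info) {
  const std::string& adapter = call_info.resolved_method;
  const bool has_appendix    = !call_info.resolved_appendix.empty();
  std::cout << "linking " << adapter
            << (has_appendix ? " with appendix" : " without appendix")
            << std::endl;
}

int main() {
  DemoCallInfo info;
  info.resolved_method      = "invokeExact adapter";
  info.resolved_appendix    = "MethodType appendix";
  info.resolved_method_type = "(I)V";
  set_method_handle_common(info);   // one argument instead of four
  return 0;
}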
--- a/hotspot/src/share/vm/oops/cpCache.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/cpCache.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -117,6 +117,8 @@
// The fields are volatile so that they are stored in the order written in the
// source code. The _indices field with the bytecode must be written last.
+class CallInfo;
+
class ConstantPoolCacheEntry VALUE_OBJ_CLASS_SPEC {
friend class VMStructs;
friend class constantPoolCacheKlass;
@@ -223,18 +225,12 @@
void set_method_handle(
constantPoolHandle cpool, // holding constant pool (required for locking)
- methodHandle method, // adapter for invokeExact, etc.
- Handle appendix, // stored in refs[f2+0]; could be a java.lang.invoke.MethodType
- Handle method_type, // stored in refs[f2+1]; is a java.lang.invoke.MethodType
- objArrayHandle resolved_references
+ const CallInfo &call_info // Call link information
);
void set_dynamic_call(
constantPoolHandle cpool, // holding constant pool (required for locking)
- methodHandle method, // adapter for this call site
- Handle appendix, // stored in refs[f2+0]; could be a java.lang.invoke.CallSite
- Handle method_type, // stored in refs[f2+1]; is a java.lang.invoke.MethodType
- objArrayHandle resolved_references
+ const CallInfo &call_info // Call link information
);
// Common code for invokedynamic and MH invocations.
@@ -255,10 +251,7 @@
void set_method_handle_common(
constantPoolHandle cpool, // holding constant pool (required for locking)
Bytecodes::Code invoke_code, // _invokehandle or _invokedynamic
- methodHandle adapter, // invoker method (f1)
- Handle appendix, // appendix such as CallSite, MethodType, etc. (refs[f2+0])
- Handle method_type, // MethodType (refs[f2+1])
- objArrayHandle resolved_references
+ const CallInfo &call_info // Call link information
);
// invokedynamic and invokehandle call sites have two entries in the
--- a/hotspot/src/share/vm/oops/instanceKlass.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/instanceKlass.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -743,6 +743,35 @@
}
}
+ if (this_oop->has_default_methods()) {
+ // Step 7.5: initialize any interfaces which have default methods
+ for (int i = 0; i < this_oop->local_interfaces()->length(); ++i) {
+ Klass* iface = this_oop->local_interfaces()->at(i);
+ InstanceKlass* ik = InstanceKlass::cast(iface);
+ if (ik->has_default_methods() && ik->should_be_initialized()) {
+ ik->initialize(THREAD);
+
+ if (HAS_PENDING_EXCEPTION) {
+ Handle e(THREAD, PENDING_EXCEPTION);
+ CLEAR_PENDING_EXCEPTION;
+ {
+ EXCEPTION_MARK;
+ // Locks object, set state, and notify all waiting threads
+ this_oop->set_initialization_state_and_notify(
+ initialization_error, THREAD);
+
+ // ignore any exception thrown, superclass initialization error is
+ // thrown below
+ CLEAR_PENDING_EXCEPTION;
+ }
+ DTRACE_CLASSINIT_PROBE_WAIT(
+ super__failed, InstanceKlass::cast(this_oop()), -1, wait);
+ THROW_OOP(e());
+ }
+ }
+ }
+ }
+
// Step 8
{
assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
@@ -1252,11 +1281,7 @@
}
#endif
-Method* InstanceKlass::find_method(Symbol* name, Symbol* signature) const {
- return InstanceKlass::find_method(methods(), name, signature);
-}
-
-Method* InstanceKlass::find_method(Array<Method*>* methods, Symbol* name, Symbol* signature) {
+static int binary_search(Array<Method*>* methods, Symbol* name) {
int len = methods->length();
// methods are sorted, so do binary search
int l = 0;
@@ -1267,43 +1292,70 @@
assert(m->is_method(), "must be method");
int res = m->name()->fast_compare(name);
if (res == 0) {
- // found matching name; do linear search to find matching signature
- // first, quick check for common case
- if (m->signature() == signature) return m;
- // search downwards through overloaded methods
- int i;
- for (i = mid - 1; i >= l; i--) {
- Method* m = methods->at(i);
- assert(m->is_method(), "must be method");
- if (m->name() != name) break;
- if (m->signature() == signature) return m;
- }
- // search upwards
- for (i = mid + 1; i <= h; i++) {
- Method* m = methods->at(i);
- assert(m->is_method(), "must be method");
- if (m->name() != name) break;
- if (m->signature() == signature) return m;
- }
- // not found
-#ifdef ASSERT
- int index = linear_search(methods, name, signature);
- assert(index == -1, err_msg("binary search should have found entry %d", index));
-#endif
- return NULL;
+ return mid;
} else if (res < 0) {
l = mid + 1;
} else {
h = mid - 1;
}
}
+ return -1;
+}
+
+Method* InstanceKlass::find_method(Symbol* name, Symbol* signature) const {
+ return InstanceKlass::find_method(methods(), name, signature);
+}
+
+Method* InstanceKlass::find_method(
+ Array<Method*>* methods, Symbol* name, Symbol* signature) {
+ int hit = binary_search(methods, name);
+ if (hit != -1) {
+ Method* m = methods->at(hit);
+ // Do linear search to find matching signature. First, quick check
+ // for common case
+ if (m->signature() == signature) return m;
+ // search downwards through overloaded methods
+ int i;
+ for (i = hit - 1; i >= 0; --i) {
+ Method* m = methods->at(i);
+ assert(m->is_method(), "must be method");
+ if (m->name() != name) break;
+ if (m->signature() == signature) return m;
+ }
+ // search upwards
+ for (i = hit + 1; i < methods->length(); ++i) {
+ Method* m = methods->at(i);
+ assert(m->is_method(), "must be method");
+ if (m->name() != name) break;
+ if (m->signature() == signature) return m;
+ }
+ // not found
#ifdef ASSERT
- int index = linear_search(methods, name, signature);
- assert(index == -1, err_msg("binary search should have found entry %d", index));
+ int index = linear_search(methods, name, signature);
+ assert(index == -1, err_msg("binary search should have found entry %d", index));
#endif
+ }
return NULL;
}
+int InstanceKlass::find_method_by_name(Symbol* name, int* end) {
+ return find_method_by_name(methods(), name, end);
+}
+
+int InstanceKlass::find_method_by_name(
+ Array<Method*>* methods, Symbol* name, int* end_ptr) {
+ assert(end_ptr != NULL, "just checking");
+ int start = binary_search(methods, name);
+ int end = start + 1;
+ if (start != -1) {
+ while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
+ while (end < methods->length() && (methods->at(end))->name() == name) ++end;
+ *end_ptr = end;
+ return start;
+ }
+ return -1;
+}
+
Method* InstanceKlass::uncached_lookup_method(Symbol* name, Symbol* signature) const {
Klass* klass = const_cast<InstanceKlass*>(this);
while (klass != NULL) {
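Note on the instanceKlass.cpp hunk above: find_method is split into a name-only binary search plus a short linear scan over same-name overloads, and the new find_method_by_name reuses the binary search to return the [start, end) index range of all methods sharing a name. The sketch below reproduces that search shape over a name-sorted std::vector; it is a standalone model, not the HotSpot Array<Method*> code.

#include <cassert>
#include <cstddef>
#include <string>
#include <vector>

struct DemoMethod { std::string name, signature; };

// Binary search over methods sorted by name; returns any matching index or -1.
static int binary_search(const std::vector<DemoMethod>& methods,
                         const std::string& name) {
  int l = 0, h = (int)methods.size() - 1;
  while (l <= h) {
    int mid = (l + h) / 2;
    int res = methods[mid].name.compare(name);
    if (res == 0) return mid;
    if (res < 0) l = mid + 1; else h = mid - 1;
  }
  return -1;
}

// Exact (name, signature) lookup: hit by name, then scan the overloads.
static const DemoMethod* find_method(const std::vector<DemoMethod>& methods,
                                     const std::string& name,
                                     const std::string& signature) {
  int hit = binary_search(methods, name);
  if (hit == -1) return NULL;
  for (int i = hit; i >= 0 && methods[i].name == name; --i)
    if (methods[i].signature == signature) return &methods[i];
  for (int i = hit + 1; i < (int)methods.size() && methods[i].name == name; ++i)
    if (methods[i].signature == signature) return &methods[i];
  return NULL;
}

// [start, end) range of all methods sharing 'name', or -1 if none.
static int find_method_by_name(const std::vector<DemoMethod>& methods,
                               const std::string& name, int* end_ptr) {
  int start = binary_search(methods, name);
  if (start == -1) return -1;
  int end = start + 1;
  while (start - 1 >= 0 && methods[start - 1].name == name) --start;
  while (end < (int)methods.size() && methods[end].name == name) ++end;
  *end_ptr = end;
  return start;
}

int main() {
  std::vector<DemoMethod> methods;             // kept sorted by name
  DemoMethod a = {"foo", "()V"};   methods.push_back(a);
  DemoMethod b = {"foo", "(I)V"};  methods.push_back(b);
  DemoMethod c = {"bar", "()V"};   methods.insert(methods.begin(), c);
  assert(find_method(methods, "foo", "(I)V") != NULL);
  int end = 0;
  assert(find_method_by_name(methods, "foo", &end) == 1 && end == 3);
  return 0;
}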
--- a/hotspot/src/share/vm/oops/instanceKlass.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/instanceKlass.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -245,6 +245,10 @@
unsigned char * _cached_class_file_bytes; // JVMTI: cached class file, before retransformable agent modified it in CFLH
jint _cached_class_file_len; // JVMTI: length of above
JvmtiCachedClassFieldMap* _jvmti_cached_class_field_map; // JVMTI: used during heap iteration
+
+ // true if class, superclass, or implemented interfaces have default methods
+ bool _has_default_methods;
+
volatile u2 _idnum_allocated_count; // JNI/JVMTI: increments with the addition of methods, old ids don't change
// Method array.
Array<Method*>* _methods;
@@ -492,6 +496,13 @@
// (returns NULL if not found)
Method* lookup_method_in_all_interfaces(Symbol* name, Symbol* signature) const;
+ // Find method indices by name. If a method with the specified name is
+ // found the index to the first method is returned, and 'end' is filled in
+ // with the index of first non-name-matching method. If no method is found
+ // -1 is returned.
+ int find_method_by_name(Symbol* name, int* end);
+ static int find_method_by_name(Array<Method*>* methods, Symbol* name, int* end);
+
// constant pool
ConstantPool* constants() const { return _constants; }
void set_constants(ConstantPool* c) { _constants = c; }
@@ -592,6 +603,9 @@
return _jvmti_cached_class_field_map;
}
+ bool has_default_methods() const { return _has_default_methods; }
+ void set_has_default_methods(bool b) { _has_default_methods = b; }
+
// for adding methods, ConstMethod::UNSET_IDNUM means no more ids available
inline u2 next_method_idnum();
void set_initial_method_idnum(u2 value) { _idnum_allocated_count = value; }
@@ -728,7 +742,6 @@
GrowableArray<Klass*>* compute_secondary_supers(int num_extra_slots);
bool compute_is_subtype_of(Klass* k);
bool can_be_primary_super_slow() const;
- Klass* java_super() const { return super(); }
int oop_size(oop obj) const { return size_helper(); }
bool oop_is_instance_slow() const { return true; }
@@ -750,6 +763,10 @@
return (InstanceKlass*) k;
}
+ InstanceKlass* java_super() const {
+ return (super() == NULL) ? NULL : cast(super());
+ }
+
// Sizing (in words)
static int header_size() { return align_object_offset(sizeof(InstanceKlass)/HeapWordSize); }
static int size(int vtable_length, int itable_length,
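Note on the instanceKlass.hpp hunk above: java_super() now hands back an InstanceKlass* with an explicit NULL guard for the class that has no superclass, so callers avoid the cast. A tiny model of the typed-super accessor follows; the Demo* names are invented.

#include <cassert>
#include <cstddef>

struct DemoKlass {
  DemoKlass* _super;
  explicit DemoKlass(DemoKlass* s) : _super(s) {}
  virtual ~DemoKlass() {}
  DemoKlass* super() const { return _super; }
};

struct DemoInstanceKlass : DemoKlass {
  explicit DemoInstanceKlass(DemoKlass* s) : DemoKlass(s) {}
  static DemoInstanceKlass* cast(DemoKlass* k) {
    return static_cast<DemoInstanceKlass*>(k);
  }
  // Typed accessor: callers get the derived type, NULL stays NULL.
  DemoInstanceKlass* java_super() const {
    return (super() == NULL) ? NULL : cast(super());
  }
};

int main() {
  DemoInstanceKlass object_klass(NULL);          // no superclass
  DemoInstanceKlass string_klass(&object_klass);
  assert(object_klass.java_super() == NULL);
  assert(string_klass.java_super() == &object_klass);
  return 0;
}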
--- a/hotspot/src/share/vm/oops/klassVtable.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/klassVtable.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -54,22 +54,16 @@
// the same name and signature as m), then m is a Miranda method which is
// entered as a public abstract method in C's vtable. From then on it should
// treated as any other public method in C for method over-ride purposes.
-void klassVtable::compute_vtable_size_and_num_mirandas(int &vtable_length,
- int &num_miranda_methods,
- Klass* super,
- Array<Method*>* methods,
- AccessFlags class_flags,
- Handle classloader,
- Symbol* classname,
- Array<Klass*>* local_interfaces,
- TRAPS
- ) {
-
+void klassVtable::compute_vtable_size_and_num_mirandas(
+ int* vtable_length_ret, int* num_new_mirandas,
+ GrowableArray<Method*>* all_mirandas, Klass* super,
+ Array<Method*>* methods, AccessFlags class_flags,
+ Handle classloader, Symbol* classname, Array<Klass*>* local_interfaces,
+ TRAPS) {
No_Safepoint_Verifier nsv;
// set up default result values
- vtable_length = 0;
- num_miranda_methods = 0;
+ int vtable_length = 0;
// start off with super's vtable length
InstanceKlass* sk = (InstanceKlass*)super;
@@ -86,9 +80,12 @@
}
}
+ GrowableArray<Method*> new_mirandas(20);
// compute the number of mirandas methods that must be added to the end
- num_miranda_methods = get_num_mirandas(super, methods, local_interfaces);
- vtable_length += (num_miranda_methods * vtableEntry::size());
+ get_mirandas(&new_mirandas, all_mirandas, super, methods, local_interfaces);
+ *num_new_mirandas = new_mirandas.length();
+
+ vtable_length += *num_new_mirandas * vtableEntry::size();
if (Universe::is_bootstrapping() && vtable_length == 0) {
// array classes don't have their superclass set correctly during
@@ -109,6 +106,8 @@
"bad vtable size for class Object");
assert(vtable_length % vtableEntry::size() == 0, "bad vtable length");
assert(vtable_length >= Universe::base_vtable_size(), "vtable too small");
+
+ *vtable_length_ret = vtable_length;
}
int klassVtable::index_of(Method* m, int len) const {
@@ -191,7 +190,7 @@
}
// add miranda methods; it will also update the value of initialized
- fill_in_mirandas(initialized);
+ fill_in_mirandas(&initialized);
// In class hierarchies where the accessibility is not increasing (i.e., going from private ->
// package_private -> publicprotected), the vtable might actually be smaller than our initial
@@ -249,6 +248,11 @@
return superk;
}
+// Methods that are "effectively" final don't need vtable entries.
+bool method_is_effectively_final(
+ AccessFlags klass_flags, methodHandle target) {
+ return target->is_final() || klass_flags.is_final() && !target->is_overpass();
+}
// Update child's copy of super vtable for overrides
// OR return true if a new vtable entry is required
@@ -269,7 +273,7 @@
return false;
}
- if (klass->is_final() || target_method()->is_final()) {
+ if (method_is_effectively_final(klass->access_flags(), target_method)) {
// a final method never needs a new entry; final methods can be statically
// resolved and they have to be present in the vtable only if they override
// a super's method, in which case they re-use its entry
@@ -303,7 +307,7 @@
if (super_method->name() == name && super_method->signature() == signature) {
// get super_klass for method_holder for the found method
- InstanceKlass* super_klass = InstanceKlass::cast(super_method->method_holder());
+ InstanceKlass* super_klass = super_method->method_holder();
if ((super_klass->is_override(super_method, target_loader, target_classname, THREAD)) ||
((klass->major_version() >= VTABLE_TRANSITIVE_OVERRIDE_VERSION)
@@ -406,7 +410,8 @@
Symbol* classname,
AccessFlags class_flags,
TRAPS) {
- if ((class_flags.is_final() || target_method()->is_final()) ||
+
+ if (method_is_effectively_final(class_flags, target_method) ||
// a final method never needs a new entry; final methods can be statically
// resolved and they have to be present in the vtable only if they override
// a super's method, in which case they re-use its entry
@@ -447,7 +452,7 @@
}
// get the class holding the matching method
// make sure you use that class for is_override
- InstanceKlass* superk = InstanceKlass::cast(super_method->method_holder());
+ InstanceKlass* superk = super_method->method_holder();
// we want only instance method matches
// pretend private methods are not in the super vtable
// since we do override around them: e.g. a.m pub/b.m private/c.m pub,
@@ -502,7 +507,7 @@
// miranda methods are interface methods in a class's vtable
if (mhk->is_interface()) {
- assert(m->is_public() && m->is_abstract(), "should be public and abstract");
+ assert(m->is_public(), "should be public");
assert(ik()->implements_interface(method_holder) , "this class should implement the interface");
assert(is_miranda(m, ik()->methods(), ik()->super()), "should be a miranda_method");
return true;
@@ -532,19 +537,19 @@
return false;
}
-void klassVtable::add_new_mirandas_to_list(GrowableArray<Method*>* list_of_current_mirandas,
- Array<Method*>* current_interface_methods,
- Array<Method*>* class_methods,
- Klass* super) {
+void klassVtable::add_new_mirandas_to_lists(
+ GrowableArray<Method*>* new_mirandas, GrowableArray<Method*>* all_mirandas,
+ Array<Method*>* current_interface_methods, Array<Method*>* class_methods,
+ Klass* super) {
// iterate thru the current interface's method to see if it a miranda
int num_methods = current_interface_methods->length();
for (int i = 0; i < num_methods; i++) {
Method* im = current_interface_methods->at(i);
bool is_duplicate = false;
- int num_of_current_mirandas = list_of_current_mirandas->length();
+ int num_of_current_mirandas = new_mirandas->length();
// check for duplicate mirandas in different interfaces we implement
for (int j = 0; j < num_of_current_mirandas; j++) {
- Method* miranda = list_of_current_mirandas->at(j);
+ Method* miranda = new_mirandas->at(j);
if ((im->name() == miranda->name()) &&
(im->signature() == miranda->signature())) {
is_duplicate = true;
@@ -557,51 +562,47 @@
InstanceKlass *sk = InstanceKlass::cast(super);
// check if it is a duplicate of a super's miranda
if (sk->lookup_method_in_all_interfaces(im->name(), im->signature()) == NULL) {
- list_of_current_mirandas->append(im);
+ new_mirandas->append(im);
+ }
+ if (all_mirandas != NULL) {
+ all_mirandas->append(im);
}
}
}
}
}
-void klassVtable::get_mirandas(GrowableArray<Method*>* mirandas,
+void klassVtable::get_mirandas(GrowableArray<Method*>* new_mirandas,
+ GrowableArray<Method*>* all_mirandas,
Klass* super, Array<Method*>* class_methods,
Array<Klass*>* local_interfaces) {
- assert((mirandas->length() == 0) , "current mirandas must be 0");
+ assert((new_mirandas->length() == 0) , "current mirandas must be 0");
// iterate thru the local interfaces looking for a miranda
int num_local_ifs = local_interfaces->length();
for (int i = 0; i < num_local_ifs; i++) {
InstanceKlass *ik = InstanceKlass::cast(local_interfaces->at(i));
- add_new_mirandas_to_list(mirandas, ik->methods(), class_methods, super);
+ add_new_mirandas_to_lists(new_mirandas, all_mirandas,
+ ik->methods(), class_methods, super);
// iterate thru each local's super interfaces
Array<Klass*>* super_ifs = ik->transitive_interfaces();
int num_super_ifs = super_ifs->length();
for (int j = 0; j < num_super_ifs; j++) {
InstanceKlass *sik = InstanceKlass::cast(super_ifs->at(j));
- add_new_mirandas_to_list(mirandas, sik->methods(), class_methods, super);
+ add_new_mirandas_to_lists(new_mirandas, all_mirandas,
+ sik->methods(), class_methods, super);
}
}
}
-// get number of mirandas
-int klassVtable::get_num_mirandas(Klass* super, Array<Method*>* class_methods, Array<Klass*>* local_interfaces) {
- ResourceMark rm;
- GrowableArray<Method*>* mirandas = new GrowableArray<Method*>(20);
- get_mirandas(mirandas, super, class_methods, local_interfaces);
- return mirandas->length();
-}
-
// fill in mirandas
-void klassVtable::fill_in_mirandas(int& initialized) {
- ResourceMark rm;
- GrowableArray<Method*>* mirandas = new GrowableArray<Method*>(20);
- InstanceKlass *this_ik = ik();
- get_mirandas(mirandas, this_ik->super(), this_ik->methods(), this_ik->local_interfaces());
- int num_mirandas = mirandas->length();
- for (int i = 0; i < num_mirandas; i++) {
- put_method_at(mirandas->at(i), initialized);
- initialized++;
+void klassVtable::fill_in_mirandas(int* initialized) {
+ GrowableArray<Method*> mirandas(20);
+ get_mirandas(&mirandas, NULL, ik()->super(), ik()->methods(),
+ ik()->local_interfaces());
+ for (int i = 0; i < mirandas.length(); i++) {
+ put_method_at(mirandas.at(i), *initialized);
+ ++(*initialized);
}
}
@@ -629,7 +630,7 @@
if (!(*trace_name_printed)) {
// RC_TRACE_MESG macro has an embedded ResourceMark
RC_TRACE_MESG(("adjust: name=%s",
- Klass::cast(old_method->method_holder())->external_name()));
+ old_method->method_holder()->external_name()));
*trace_name_printed = true;
}
// RC_TRACE macro has an embedded ResourceMark
@@ -744,7 +745,7 @@
Method* target = klass->uncached_lookup_method(method_name, method_signature);
while (target != NULL && target->is_static()) {
// continue with recursive lookup through the superclass
- Klass* super = Klass::cast(target->method_holder())->super();
+ Klass* super = target->method_holder()->super();
target = (super == NULL) ? (Method*)NULL : Klass::cast(super)->uncached_lookup_method(method_name, method_signature);
}
if (target == NULL || !target->is_public() || target->is_abstract()) {
@@ -754,7 +755,7 @@
// if checkconstraints requested
methodHandle target_h (THREAD, target); // preserve across gc
if (checkconstraints) {
- Handle method_holder_loader (THREAD, InstanceKlass::cast(target->method_holder())->class_loader());
+ Handle method_holder_loader (THREAD, target->method_holder()->class_loader());
if (method_holder_loader() != interface_loader()) {
ResourceMark rm(THREAD);
char* failed_type_name =
@@ -824,7 +825,7 @@
if (!(*trace_name_printed)) {
// RC_TRACE_MESG macro has an embedded ResourceMark
RC_TRACE_MESG(("adjust: name=%s",
- Klass::cast(old_method->method_holder())->external_name()));
+ old_method->method_holder()->external_name()));
*trace_name_printed = true;
}
// RC_TRACE macro has an embedded ResourceMark
@@ -959,9 +960,9 @@
// m must be a method in an interface
int klassItable::compute_itable_index(Method* m) {
- Klass* intf = m->method_holder();
- assert(InstanceKlass::cast(intf)->is_interface(), "sanity check");
- Array<Method*>* methods = InstanceKlass::cast(intf)->methods();
+ InstanceKlass* intf = m->method_holder();
+ assert(intf->is_interface(), "sanity check");
+ Array<Method*>* methods = intf->methods();
int index = 0;
while(methods->at(index) != m) {
index++;
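Note on the klassVtable.cpp hunk above: the new method_is_effectively_final() helper widens the old is_final() test. Final methods never need a new vtable entry, and neither do ordinary methods of a final class, but generated overpass methods in a final class are deliberately excluded. The checks below restate the predicate with hypothetical stand-in types; this is not the HotSpot code itself.

#include <cassert>

struct DemoFlags  { bool final_flag;
                    bool is_final() const { return final_flag; } };
struct DemoMethod { bool final_flag; bool overpass;
                    bool is_final() const    { return final_flag; }
                    bool is_overpass() const { return overpass; } };

static bool method_is_effectively_final(DemoFlags klass_flags,
                                        const DemoMethod& target) {
  return target.is_final() ||
         (klass_flags.is_final() && !target.is_overpass());
}

int main() {
  DemoFlags final_class = { true }, open_class = { false };
  DemoMethod plain = { false, false }, overpass = { false, true };
  assert( method_is_effectively_final(final_class, plain));
  assert(!method_is_effectively_final(final_class, overpass));
  assert(!method_is_effectively_final(open_class,  plain));
  return 0;
}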
--- a/hotspot/src/share/vm/oops/klassVtable.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/klassVtable.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -84,11 +84,11 @@
bool is_initialized();
// computes vtable length (in words) and the number of miranda methods
- static void compute_vtable_size_and_num_mirandas(int &vtable_length, int &num_miranda_methods,
- Klass* super, Array<Method*>* methods,
- AccessFlags class_flags, Handle classloader,
- Symbol* classname, Array<Klass*>* local_interfaces,
- TRAPS);
+ static void compute_vtable_size_and_num_mirandas(
+ int* vtable_length, int* num_new_mirandas,
+ GrowableArray<Method*>* all_mirandas, Klass* super,
+ Array<Method*>* methods, AccessFlags class_flags, Handle classloader,
+ Symbol* classname, Array<Klass*>* local_interfaces, TRAPS);
// RedefineClasses() API support:
// If any entry of this vtable points to any of old_methods,
@@ -125,12 +125,17 @@
// support for miranda methods
bool is_miranda_entry_at(int i);
- void fill_in_mirandas(int& initialized);
+ void fill_in_mirandas(int* initialized);
static bool is_miranda(Method* m, Array<Method*>* class_methods, Klass* super);
- static void add_new_mirandas_to_list(GrowableArray<Method*>* list_of_current_mirandas, Array<Method*>* current_interface_methods, Array<Method*>* class_methods, Klass* super);
- static void get_mirandas(GrowableArray<Method*>* mirandas, Klass* super, Array<Method*>* class_methods, Array<Klass*>* local_interfaces);
- static int get_num_mirandas(Klass* super, Array<Method*>* class_methods, Array<Klass*>* local_interfaces);
-
+ static void add_new_mirandas_to_lists(
+ GrowableArray<Method*>* new_mirandas,
+ GrowableArray<Method*>* all_mirandas,
+ Array<Method*>* current_interface_methods, Array<Method*>* class_methods,
+ Klass* super);
+ static void get_mirandas(
+ GrowableArray<Method*>* new_mirandas,
+ GrowableArray<Method*>* all_mirandas, Klass* super,
+ Array<Method*>* class_methods, Array<Klass*>* local_interfaces);
void verify_against(outputStream* st, klassVtable* vt, int index);
inline InstanceKlass* ik() const;
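Note on the klassVtable.hpp hunk above: compute_vtable_size_and_num_mirandas now returns its results through pointer out-parameters and can optionally collect every candidate miranda into an all_mirandas list, while new_mirandas receives only the ones not already inherited. The sketch below models just the optional-second-sink idea with std::vector in place of GrowableArray; it simplifies the real duplicate checks.

#include <cassert>
#include <cstddef>
#include <string>
#include <vector>

// Collect methods into 'new_items' unless already inherited; if 'all_items'
// is non-NULL, record every candidate there regardless.
static void add_to_lists(const std::vector<std::string>& candidates,
                         const std::vector<std::string>& already_inherited,
                         std::vector<std::string>* new_items,
                         std::vector<std::string>* all_items) {
  for (size_t i = 0; i < candidates.size(); ++i) {
    const std::string& m = candidates[i];
    bool inherited = false;
    for (size_t j = 0; j < already_inherited.size(); ++j) {
      if (already_inherited[j] == m) { inherited = true; break; }
    }
    if (!inherited) new_items->push_back(m);
    if (all_items != NULL) all_items->push_back(m);
  }
}

int main() {
  std::vector<std::string> candidates, inherited, fresh, all;
  candidates.push_back("m1"); candidates.push_back("m2");
  inherited.push_back("m2");
  add_to_lists(candidates, inherited, &fresh, &all);
  assert(fresh.size() == 1 && all.size() == 2);
  add_to_lists(candidates, inherited, &fresh, NULL);  // second sink is optional
  return 0;
}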
--- a/hotspot/src/share/vm/oops/method.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/method.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -35,6 +35,7 @@
#include "memory/generation.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/oopFactory.hpp"
+#include "oops/constMethod.hpp"
#include "oops/methodData.hpp"
#include "oops/method.hpp"
#include "oops/oop.inline.hpp"
@@ -57,22 +58,24 @@
// Implementation of Method
Method* Method::allocate(ClassLoaderData* loader_data,
- int byte_code_size,
- AccessFlags access_flags,
- int compressed_line_number_size,
- int localvariable_table_length,
- int exception_table_length,
- int checked_exceptions_length,
- TRAPS) {
+ int byte_code_size,
+ AccessFlags access_flags,
+ int compressed_line_number_size,
+ int localvariable_table_length,
+ int exception_table_length,
+ int checked_exceptions_length,
+ ConstMethod::MethodType method_type,
+ TRAPS) {
assert(!access_flags.is_native() || byte_code_size == 0,
"native methods should not contain byte codes");
ConstMethod* cm = ConstMethod::allocate(loader_data,
- byte_code_size,
- compressed_line_number_size,
- localvariable_table_length,
- exception_table_length,
- checked_exceptions_length,
- CHECK_NULL);
+ byte_code_size,
+ compressed_line_number_size,
+ localvariable_table_length,
+ exception_table_length,
+ checked_exceptions_length,
+ method_type,
+ CHECK_NULL);
int size = Method::size(access_flags.is_native());
@@ -240,12 +243,12 @@
warning("oopmap should only be accessed by the "
"VM, GC task or CMS threads (or during debugging)");
InterpreterOopMap local_mask;
- InstanceKlass::cast(method_holder())->mask_for(h_this, bci, &local_mask);
+ method_holder()->mask_for(h_this, bci, &local_mask);
local_mask.print();
}
}
#endif
- InstanceKlass::cast(method_holder())->mask_for(h_this, bci, mask);
+ method_holder()->mask_for(h_this, bci, mask);
return;
}
@@ -520,7 +523,7 @@
bool Method::is_final_method() const {
// %%% Should return true for private methods also,
// since there is no way to override them.
- return is_final() || Klass::cast(method_holder())->is_final();
+ return is_final() || method_holder()->is_final();
}
@@ -552,7 +555,7 @@
bool Method::has_valid_initializer_flags() const {
return (is_static() ||
- InstanceKlass::cast(method_holder())->major_version() < 51);
+ method_holder()->major_version() < 51);
}
bool Method::is_static_initializer() const {
@@ -614,7 +617,7 @@
if( constants()->tag_at(klass_index).is_unresolved_klass() ) {
Thread *thread = Thread::current();
Symbol* klass_name = constants()->klass_name_at(klass_index);
- Handle loader(thread, InstanceKlass::cast(method_holder())->class_loader());
+ Handle loader(thread, method_holder()->class_loader());
Handle prot (thread, Klass::cast(method_holder())->protection_domain());
return SystemDictionary::find(klass_name, loader, prot, thread) != NULL;
} else {
@@ -932,7 +935,7 @@
// If method is an interface, we skip it - except if it
// is a miranda method
- if (InstanceKlass::cast(method_holder())->is_interface()) {
+ if (method_holder()->is_interface()) {
// Check that method is not a miranda method
if (ik->lookup_method(name(), signature()) == NULL) {
// No implementation exist - so miranda method
@@ -1017,7 +1020,7 @@
ConstantPool* cp_oop = ConstantPool::allocate(loader_data, cp_length, CHECK_(empty));
cp = constantPoolHandle(THREAD, cp_oop);
}
- cp->set_pool_holder(holder());
+ cp->set_pool_holder(InstanceKlass::cast(holder()));
cp->symbol_at_put(_imcp_invoke_name, name);
cp->symbol_at_put(_imcp_invoke_signature, signature);
cp->set_preresolution();
@@ -1031,7 +1034,7 @@
methodHandle m;
{
Method* m_oop = Method::allocate(loader_data, 0, accessFlags_from(flags_bits),
- 0, 0, 0, 0, CHECK_(empty));
+ 0, 0, 0, 0, ConstMethod::NORMAL, CHECK_(empty));
m = methodHandle(THREAD, m_oop);
}
m->set_constants(cp());
@@ -1083,15 +1086,16 @@
int localvariable_len = m->localvariable_table_length();
int exception_table_len = m->exception_table_length();
- ClassLoaderData* loader_data = m()->method_holder()->class_loader_data();
+ ClassLoaderData* loader_data = m->method_holder()->class_loader_data();
Method* newm_oop = Method::allocate(loader_data,
- new_code_length,
- flags,
- new_compressed_linenumber_size,
- localvariable_len,
- exception_table_len,
- checked_exceptions_len,
- CHECK_(methodHandle()));
+ new_code_length,
+ flags,
+ new_compressed_linenumber_size,
+ localvariable_len,
+ exception_table_len,
+ checked_exceptions_len,
+ m->method_type(),
+ CHECK_(methodHandle()));
methodHandle newm (THREAD, newm_oop);
int new_method_size = newm->method_size();
@@ -1233,8 +1237,8 @@
return false;
}
bool sig_is_loaded = true;
- Handle class_loader(THREAD, InstanceKlass::cast(m->method_holder())->class_loader());
- Handle protection_domain(THREAD, Klass::cast(m->method_holder())->protection_domain());
+ Handle class_loader(THREAD, m->method_holder()->class_loader());
+ Handle protection_domain(THREAD, m->method_holder()->protection_domain());
ResourceMark rm(THREAD);
Symbol* signature = m->signature();
for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {
@@ -1260,8 +1264,8 @@
}
bool Method::has_unloaded_classes_in_signature(methodHandle m, TRAPS) {
- Handle class_loader(THREAD, InstanceKlass::cast(m->method_holder())->class_loader());
- Handle protection_domain(THREAD, Klass::cast(m->method_holder())->protection_domain());
+ Handle class_loader(THREAD, m->method_holder()->class_loader());
+ Handle protection_domain(THREAD, m->method_holder()->protection_domain());
ResourceMark rm(THREAD);
Symbol* signature = m->signature();
for(SignatureStream ss(signature); !ss.is_done(); ss.next()) {
@@ -1468,7 +1472,7 @@
Bytecodes::Code Method::orig_bytecode_at(int bci) const {
- BreakpointInfo* bp = InstanceKlass::cast(method_holder())->breakpoints();
+ BreakpointInfo* bp = method_holder()->breakpoints();
for (; bp != NULL; bp = bp->next()) {
if (bp->match(this, bci)) {
return bp->orig_bytecode();
@@ -1480,7 +1484,7 @@
void Method::set_orig_bytecode_at(int bci, Bytecodes::Code code) {
assert(code != Bytecodes::_breakpoint, "cannot patch breakpoints this way");
- BreakpointInfo* bp = InstanceKlass::cast(method_holder())->breakpoints();
+ BreakpointInfo* bp = method_holder()->breakpoints();
for (; bp != NULL; bp = bp->next()) {
if (bp->match(this, bci)) {
bp->set_orig_bytecode(code);
@@ -1490,7 +1494,7 @@
}
void Method::set_breakpoint(int bci) {
- InstanceKlass* ik = InstanceKlass::cast(method_holder());
+ InstanceKlass* ik = method_holder();
BreakpointInfo *bp = new BreakpointInfo(this, bci);
bp->set_next(ik->breakpoints());
ik->set_breakpoints(bp);
@@ -1499,7 +1503,7 @@
}
static void clear_matches(Method* m, int bci) {
- InstanceKlass* ik = InstanceKlass::cast(m->method_holder());
+ InstanceKlass* ik = m->method_holder();
BreakpointInfo* prev_bp = NULL;
BreakpointInfo* next_bp;
for (BreakpointInfo* bp = ik->breakpoints(); bp != NULL; bp = next_bp) {
@@ -1782,7 +1786,7 @@
bool Method::is_method_id(jmethodID mid) {
Method* m = resolve_jmethod_id(mid);
assert(m != NULL, "should be called with non-null method");
- InstanceKlass* ik = InstanceKlass::cast(m->method_holder());
+ InstanceKlass* ik = m->method_holder();
ClassLoaderData* cld = ik->class_loader_data();
if (cld->jmethod_ids() == NULL) return false;
return (cld->jmethod_ids()->contains((Method**)mid));
--- a/hotspot/src/share/vm/oops/method.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/oops/method.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -30,7 +30,6 @@
#include "compiler/oopMap.hpp"
#include "interpreter/invocationCounter.hpp"
#include "oops/annotations.hpp"
-#include "oops/constMethod.hpp"
#include "oops/constantPool.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/oop.hpp"
@@ -104,6 +103,7 @@
class LocalVariableTableElement;
class AdapterHandlerEntry;
class MethodData;
+class ConstMethod;
class Method : public Metadata {
friend class VMStructs;
@@ -158,14 +158,16 @@
// Constructor
Method(ConstMethod* xconst, AccessFlags access_flags, int size);
public:
+
static Method* allocate(ClassLoaderData* loader_data,
- int byte_code_size,
- AccessFlags access_flags,
- int compressed_line_number_size,
- int localvariable_table_length,
- int exception_table_length,
- int checked_exceptions_length,
- TRAPS);
+ int byte_code_size,
+ AccessFlags access_flags,
+ int compressed_line_number_size,
+ int localvariable_table_length,
+ int exception_table_length,
+ int checked_exceptions_length,
+ ConstMethod::MethodType method_type,
+ TRAPS);
Method() { assert(DumpSharedSpaces || UseSharedSpaces, "only for CDS"); }
@@ -207,21 +209,21 @@
// annotations support
AnnotationArray* annotations() const {
- InstanceKlass* ik = InstanceKlass::cast(method_holder());
+ InstanceKlass* ik = method_holder();
if (ik->annotations() == NULL) {
return NULL;
}
return ik->annotations()->get_method_annotations_of(method_idnum());
}
AnnotationArray* parameter_annotations() const {
- InstanceKlass* ik = InstanceKlass::cast(method_holder());
+ InstanceKlass* ik = method_holder();
if (ik->annotations() == NULL) {
return NULL;
}
return ik->annotations()->get_method_parameter_annotations_of(method_idnum());
}
AnnotationArray* annotation_default() const {
- InstanceKlass* ik = InstanceKlass::cast(method_holder());
+ InstanceKlass* ik = method_holder();
if (ik->annotations() == NULL) {
return NULL;
}
@@ -494,7 +496,7 @@
{ return constMethod()->compressed_linenumber_table(); }
// method holder (the Klass* holding this method)
- Klass* method_holder() const { return constants()->pool_holder(); }
+ InstanceKlass* method_holder() const { return constants()->pool_holder(); }
void compute_size_of_parameters(Thread *thread); // word size of parameters (receiver if any + arguments)
Symbol* klass_name() const; // returns the name of the method holder
@@ -695,18 +697,18 @@
// Get this method's jmethodID -- allocate if it doesn't exist
jmethodID jmethod_id() { methodHandle this_h(this);
- return InstanceKlass::get_jmethod_id(InstanceKlass::cast(method_holder()), this_h); }
+ return InstanceKlass::get_jmethod_id(method_holder(), this_h); }
// Lookup the jmethodID for this method. Return NULL if not found.
// NOTE that this function can be called from a signal handler
// (see AsyncGetCallTrace support for Forte Analyzer) and this
// needs to be async-safe. No allocation should be done and
// so handles are not used to avoid deadlock.
- jmethodID find_jmethod_id_or_null() { return InstanceKlass::cast(method_holder())->jmethod_id_or_null(this); }
+ jmethodID find_jmethod_id_or_null() { return method_holder()->jmethod_id_or_null(this); }
// JNI static invoke cached itable index accessors
- int cached_itable_index() { return InstanceKlass::cast(method_holder())->cached_itable_index(method_idnum()); }
- void set_cached_itable_index(int index) { InstanceKlass::cast(method_holder())->set_cached_itable_index(method_idnum(), index); }
+ int cached_itable_index() { return method_holder()->cached_itable_index(method_idnum()); }
+ void set_cached_itable_index(int index) { method_holder()->set_cached_itable_index(method_idnum(), index); }
// Support for inlining of intrinsic methods
vmIntrinsics::ID intrinsic_id() const { return (vmIntrinsics::ID) _intrinsic_id; }
@@ -725,14 +727,18 @@
void set_dont_inline(bool x) { _dont_inline = x; }
bool is_hidden() { return _hidden; }
void set_hidden(bool x) { _hidden = x; }
+ ConstMethod::MethodType method_type() const {
+ return _constMethod->method_type();
+ }
+ bool is_overpass() const { return method_type() == ConstMethod::OVERPASS; }
// On-stack replacement support
bool has_osr_nmethod(int level, bool match_level) {
- return InstanceKlass::cast(method_holder())->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
+ return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
}
nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
- return InstanceKlass::cast(method_holder())->lookup_osr_nmethod(this, bci, level, match_level);
+ return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
}
// Inline cache support
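Note on the method.hpp hunk above: Method simply forwards method_type() to its ConstMethod and derives is_overpass() from the result. A compact model of that delegation, with invented Demo* names:

#include <cassert>

class DemoConstMethod {
 public:
  typedef enum { NORMAL, OVERPASS } MethodType;
  explicit DemoConstMethod(MethodType mt) : _method_type(mt) {}
  MethodType method_type() const { return _method_type; }
 private:
  MethodType _method_type;
};

class DemoMethod {
 public:
  explicit DemoMethod(DemoConstMethod* cm) : _constMethod(cm) {}
  DemoConstMethod::MethodType method_type() const {
    return _constMethod->method_type();         // delegate to ConstMethod
  }
  bool is_overpass() const {
    return method_type() == DemoConstMethod::OVERPASS;
  }
 private:
  DemoConstMethod* _constMethod;
};

int main() {
  DemoConstMethod cm(DemoConstMethod::OVERPASS);
  DemoMethod m(&cm);
  assert(m.is_overpass());
  return 0;
}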
--- a/hotspot/src/share/vm/opto/escape.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/opto/escape.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -1386,12 +1386,12 @@
// Non-escaped allocation returned from Java or runtime call have
// unknown values in fields.
for (EdgeIterator i(pta); i.has_next(); i.next()) {
- PointsToNode* ptn = i.get();
- if (ptn->is_Field() && ptn->as_Field()->is_oop()) {
- if (add_edge(ptn, phantom_obj)) {
+ PointsToNode* field = i.get();
+ if (field->is_Field() && field->as_Field()->is_oop()) {
+ if (add_edge(field, phantom_obj)) {
// New edge was added
new_edges++;
- add_field_uses_to_worklist(ptn->as_Field());
+ add_field_uses_to_worklist(field->as_Field());
}
}
}
@@ -1413,30 +1413,30 @@
// captured by Initialize node.
//
for (EdgeIterator i(pta); i.has_next(); i.next()) {
- PointsToNode* ptn = i.get(); // Field (AddP)
- if (!ptn->is_Field() || !ptn->as_Field()->is_oop())
+ PointsToNode* field = i.get(); // Field (AddP)
+ if (!field->is_Field() || !field->as_Field()->is_oop())
continue; // Not oop field
- int offset = ptn->as_Field()->offset();
+ int offset = field->as_Field()->offset();
if (offset == Type::OffsetBot) {
if (!visited_bottom_offset) {
// OffsetBot is used to reference array's element,
// always add reference to NULL to all Field nodes since we don't
// known which element is referenced.
- if (add_edge(ptn, null_obj)) {
+ if (add_edge(field, null_obj)) {
// New edge was added
new_edges++;
- add_field_uses_to_worklist(ptn->as_Field());
+ add_field_uses_to_worklist(field->as_Field());
visited_bottom_offset = true;
}
}
} else {
// Check only oop fields.
- const Type* adr_type = ptn->ideal_node()->as_AddP()->bottom_type();
+ const Type* adr_type = field->ideal_node()->as_AddP()->bottom_type();
if (adr_type->isa_rawptr()) {
#ifdef ASSERT
// Raw pointers are used for initializing stores so skip it
// since it should be recorded already
- Node* base = get_addp_base(ptn->ideal_node());
+ Node* base = get_addp_base(field->ideal_node());
assert(adr_type->isa_rawptr() && base->is_Proj() &&
(base->in(0) == alloc),"unexpected pointer type");
#endif
@@ -1446,10 +1446,54 @@
offsets_worklist.append(offset);
Node* value = NULL;
if (ini != NULL) {
- BasicType ft = UseCompressedOops ? T_NARROWOOP : T_OBJECT;
- Node* store = ini->find_captured_store(offset, type2aelembytes(ft), phase);
- if (store != NULL && store->is_Store()) {
+ // StoreP::memory_type() == T_ADDRESS
+ BasicType ft = UseCompressedOops ? T_NARROWOOP : T_ADDRESS;
+ Node* store = ini->find_captured_store(offset, type2aelembytes(ft, true), phase);
+ // Make sure initializing store has the same type as this AddP.
+ // This AddP may reference non existing field because it is on a
+ // dead branch of bimorphic call which is not eliminated yet.
+ if (store != NULL && store->is_Store() &&
+ store->as_Store()->memory_type() == ft) {
value = store->in(MemNode::ValueIn);
+#ifdef ASSERT
+ if (VerifyConnectionGraph) {
+ // Verify that AddP already points to all objects the value points to.
+ PointsToNode* val = ptnode_adr(value->_idx);
+ assert((val != NULL), "should be processed already");
+ PointsToNode* missed_obj = NULL;
+ if (val->is_JavaObject()) {
+ if (!field->points_to(val->as_JavaObject())) {
+ missed_obj = val;
+ }
+ } else {
+ if (!val->is_LocalVar() || (val->edge_count() == 0)) {
+ tty->print_cr("----------init store has invalid value -----");
+ store->dump();
+ val->dump();
+ assert(val->is_LocalVar() && (val->edge_count() > 0), "should be processed already");
+ }
+ for (EdgeIterator j(val); j.has_next(); j.next()) {
+ PointsToNode* obj = j.get();
+ if (obj->is_JavaObject()) {
+ if (!field->points_to(obj->as_JavaObject())) {
+ missed_obj = obj;
+ break;
+ }
+ }
+ }
+ }
+ if (missed_obj != NULL) {
+ tty->print_cr("----------field---------------------------------");
+ field->dump();
+ tty->print_cr("----------missed referernce to object-----------");
+ missed_obj->dump();
+ tty->print_cr("----------object referernced by init store -----");
+ store->dump();
+ val->dump();
+ assert(!field->points_to(missed_obj->as_JavaObject()), "missed JavaObject reference");
+ }
+ }
+#endif
} else {
// There could be initializing stores which follow allocation.
// For example, a volatile field store is not collected
@@ -1462,10 +1506,10 @@
}
if (value == NULL) {
// A field's initializing value was not recorded. Add NULL.
- if (add_edge(ptn, null_obj)) {
+ if (add_edge(field, null_obj)) {
// New edge was added
new_edges++;
- add_field_uses_to_worklist(ptn->as_Field());
+ add_field_uses_to_worklist(field->as_Field());
}
}
}
@@ -1607,7 +1651,26 @@
}
// Verify that all fields have initializing values.
if (field->edge_count() == 0) {
+ tty->print_cr("----------field does not have references----------");
field->dump();
+ for (BaseIterator i(field); i.has_next(); i.next()) {
+ PointsToNode* base = i.get();
+ tty->print_cr("----------field has next base---------------------");
+ base->dump();
+ if (base->is_JavaObject() && (base != phantom_obj) && (base != null_obj)) {
+ tty->print_cr("----------base has fields-------------------------");
+ for (EdgeIterator j(base); j.has_next(); j.next()) {
+ j.get()->dump();
+ }
+ tty->print_cr("----------base has references---------------------");
+ for (UseIterator j(base); j.has_next(); j.next()) {
+ j.get()->dump();
+ }
+ }
+ }
+ for (UseIterator i(field); i.has_next(); i.next()) {
+ i.get()->dump();
+ }
assert(field->edge_count() > 0, "sanity");
}
}
@@ -1967,7 +2030,7 @@
if (is_JavaObject()) {
return (this == ptn);
}
- assert(is_LocalVar(), "sanity");
+ assert(is_LocalVar() || is_Field(), "sanity");
for (EdgeIterator i(this); i.has_next(); i.next()) {
if (i.get() == ptn)
return true;
@@ -3127,10 +3190,14 @@
EscapeState fields_es = fields_escape_state();
tty->print("%s(%s) ", esc_names[(int)es], esc_names[(int)fields_es]);
if (nt == PointsToNode::JavaObject && !this->scalar_replaceable())
- tty->print("NSR");
+ tty->print("NSR ");
}
if (is_Field()) {
FieldNode* f = (FieldNode*)this;
+ if (f->is_oop())
+ tty->print("oop ");
+ if (f->offset() > 0)
+ tty->print("+%d ", f->offset());
tty->print("(");
for (BaseIterator i(f); i.has_next(); i.next()) {
PointsToNode* b = i.get();
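Note on the escape.cpp hunk above: a captured initializing store is only trusted when its memory type matches the expected oop/narrow-oop type, because an AddP on a not-yet-eliminated dead branch of a bimorphic call can reference a field that does not exist; on a mismatch the field falls back to the NULL edge. The following is a heavily simplified standalone model of that guard using plain structs, not the C2 IR.

#include <cassert>
#include <cstddef>

enum BasicType { T_INT, T_ADDRESS, T_NARROWOOP };

struct DemoStore { int offset; BasicType type; int value_id; };

// Return the store's value only when both offset and memory type match;
// a mismatch means the store does not actually initialize this field.
static const DemoStore* find_trusted_init_store(const DemoStore* stores,
                                                int count, int offset,
                                                BasicType expected) {
  for (int i = 0; i < count; ++i) {
    if (stores[i].offset == offset && stores[i].type == expected) {
      return &stores[i];
    }
  }
  return NULL;   // caller records "unknown" (the NULL edge) instead
}

int main() {
  DemoStore stores[] = { { 12, T_INT, 1 }, { 16, T_ADDRESS, 2 } };
  assert(find_trusted_init_store(stores, 2, 16, T_ADDRESS) != NULL);
  assert(find_trusted_init_store(stores, 2, 12, T_ADDRESS) == NULL);
  return 0;
}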
--- a/hotspot/src/share/vm/prims/jni.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/prims/jni.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -2985,7 +2985,7 @@
}
// A jfieldID for a static field is a JNIid specifying the field holder and the offset within the Klass*
- JNIid* id = InstanceKlass::cast(fd.field_holder())->jni_id_for(fd.offset());
+ JNIid* id = fd.field_holder()->jni_id_for(fd.offset());
debug_only(id->set_is_static_field_id();)
debug_only(id->verify(fd.field_holder()));
@@ -4016,7 +4016,7 @@
if (PrintJNIResolving) {
ResourceMark rm(THREAD);
tty->print_cr("[Registering JNI native method %s.%s]",
- Klass::cast(method->method_holder())->external_name(),
+ method->method_holder()->external_name(),
method->name()->as_C_string());
}
return true;
--- a/hotspot/src/share/vm/prims/jvm.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/prims/jvm.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -125,7 +125,7 @@
int line_number = -1;
const char * source_file = NULL;
const char * trace = "explicit";
- Klass* caller = NULL;
+ InstanceKlass* caller = NULL;
JavaThread* jthread = JavaThread::current();
if (jthread->has_last_Java_frame()) {
vframeStream vfst(jthread);
@@ -153,17 +153,17 @@
// that caller, otherwise keep quiet since this should be picked up elsewhere.
bool found_it = false;
if (!vfst.at_end() &&
- InstanceKlass::cast(vfst.method()->method_holder())->name() == vmSymbols::java_lang_Class() &&
+ vfst.method()->method_holder()->name() == vmSymbols::java_lang_Class() &&
vfst.method()->name() == vmSymbols::forName0_name()) {
vfst.next();
if (!vfst.at_end() &&
- InstanceKlass::cast(vfst.method()->method_holder())->name() == vmSymbols::java_lang_Class() &&
+ vfst.method()->method_holder()->name() == vmSymbols::java_lang_Class() &&
vfst.method()->name() == vmSymbols::forName_name()) {
vfst.next();
found_it = true;
}
} else if (last_caller != NULL &&
- InstanceKlass::cast(last_caller->method_holder())->name() ==
+ last_caller->method_holder()->name() ==
vmSymbols::java_lang_ClassLoader() &&
(last_caller->name() == vmSymbols::loadClassInternal_name() ||
last_caller->name() == vmSymbols::loadClass_name())) {
@@ -182,7 +182,7 @@
// show method name if it's a native method
trace = vfst.method()->name_and_sig_as_C_string();
}
- Symbol* s = InstanceKlass::cast(caller)->source_file_name();
+ Symbol* s = caller->source_file_name();
if (s != NULL) {
source_file = s->as_C_string();
}
@@ -190,8 +190,8 @@
}
if (caller != NULL) {
if (to_class != caller) {
- const char * from = Klass::cast(caller)->external_name();
- const char * to = Klass::cast(to_class)->external_name();
+ const char * from = caller->external_name();
+ const char * to = to_class->external_name();
// print in a single call to reduce interleaving between threads
if (source_file != NULL) {
tty->print("RESOLVE %s %s %s:%d (%s)\n", from, to, source_file, line_number, trace);
@@ -1228,7 +1228,7 @@
privileged_context = Handle(thread, thread->privileged_stack_top()->privileged_context());
protection_domain = thread->privileged_stack_top()->protection_domain();
} else {
- protection_domain = InstanceKlass::cast(method->method_holder())->protection_domain();
+ protection_domain = method->method_holder()->protection_domain();
}
if ((previous_protection_domain != protection_domain) && (protection_domain != NULL)) {
@@ -3048,10 +3048,10 @@
Method* m = vfst.method();
if (!m->is_native()) {
- Klass* holder = m->method_holder();
- oop loader = InstanceKlass::cast(holder)->class_loader();
+ InstanceKlass* holder = m->method_holder();
+ oop loader = holder->class_loader();
if (loader != NULL && !java_lang_ClassLoader::is_trusted_loader(loader)) {
- return (jclass) JNIHandles::make_local(env, Klass::cast(holder)->java_mirror());
+ return (jclass) JNIHandles::make_local(env, holder->java_mirror());
}
}
}
@@ -3071,9 +3071,9 @@
Method* m = vfst.method();
if (!m->is_native()) {
- Klass* holder = m->method_holder();
+ InstanceKlass* holder = m->method_holder();
assert(holder->is_klass(), "just checking");
- oop loader = InstanceKlass::cast(holder)->class_loader();
+ oop loader = holder->class_loader();
if (loader != NULL && !java_lang_ClassLoader::is_trusted_loader(loader)) {
return JNIHandles::make_local(env, loader);
}
@@ -3148,9 +3148,9 @@
for(vframeStream vfst(thread); !vfst.at_end(); vfst.next()) {
if (!vfst.method()->is_native()) {
- Klass* holder = vfst.method()->method_holder();
+ InstanceKlass* holder = vfst.method()->method_holder();
assert(holder->is_klass(), "just checking");
- if (InstanceKlass::cast(holder)->name() == class_name_sym) {
+ if (holder->name() == class_name_sym) {
return depth;
}
depth++;
@@ -3171,9 +3171,9 @@
Method* m = vfst.method();
if (!m->is_native()) {
- Klass* holder = m->method_holder();
+ InstanceKlass* holder = m->method_holder();
assert(holder->is_klass(), "just checking");
- oop loader = InstanceKlass::cast(holder)->class_loader();
+ oop loader = holder->class_loader();
if (loader != NULL && !java_lang_ClassLoader::is_trusted_loader(loader)) {
return depth;
}
@@ -3322,8 +3322,7 @@
for (vframeStream vfst(thread); !vfst.at_end(); vfst.next()) {
// UseNewReflection
vfst.skip_reflection_related_frames(); // Only needed for 1.4 reflection
- Klass* holder = vfst.method()->method_holder();
- oop loader = InstanceKlass::cast(holder)->class_loader();
+ oop loader = vfst.method()->method_holder()->class_loader();
if (loader != NULL) {
return JNIHandles::make_local(env, loader);
}
@@ -3365,9 +3364,9 @@
!vfst.at_end() && loader == NULL;
vfst.next()) {
if (!vfst.method()->is_native()) {
- Klass* holder = vfst.method()->method_holder();
- loader = InstanceKlass::cast(holder)->class_loader();
- protection_domain = InstanceKlass::cast(holder)->protection_domain();
+ InstanceKlass* holder = vfst.method()->method_holder();
+ loader = holder->class_loader();
+ protection_domain = holder->protection_domain();
}
}
} else {
--- a/hotspot/src/share/vm/prims/jvmtiClassFileReconstituter.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/prims/jvmtiClassFileReconstituter.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -753,7 +753,7 @@
unsigned char* p = bytecodes;
Bytecodes::Code code;
- bool is_rewritten = InstanceKlass::cast(mh->method_holder())->is_rewritten();
+ bool is_rewritten = mh->method_holder()->is_rewritten();
while ((code = bs.next()) >= 0) {
assert(Bytecodes::is_java_code(code), "sanity check");
--- a/hotspot/src/share/vm/prims/jvmtiEnv.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/prims/jvmtiEnv.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -2822,7 +2822,7 @@
JavaThread* current_thread = JavaThread::current();
// does the klass have any local variable information?
- InstanceKlass* ik = InstanceKlass::cast(method_oop->method_holder());
+ InstanceKlass* ik = method_oop->method_holder();
if (!ik->access_flags().has_localvariable_table()) {
return (JVMTI_ERROR_ABSENT_INFORMATION);
}
--- a/hotspot/src/share/vm/prims/methodHandles.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/prims/methodHandles.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -233,7 +233,7 @@
methodHandle m = info.resolved_method();
KlassHandle defc = info.resolved_klass();
int vmindex = -1;
- if (defc->is_interface() && Klass::cast(m->method_holder())->is_interface()) {
+ if (defc->is_interface() && m->method_holder()->is_interface()) {
// LinkResolver does not report itable indexes! (fix this?)
vmindex = klassItable::compute_itable_index(m());
} else if (m->can_be_statically_bound()) {
@@ -749,8 +749,8 @@
DEBUG_ONLY(vmtarget = NULL); // safety
if (m.is_null()) break;
if (!have_defc) {
- Klass* defc = m->method_holder();
- java_lang_invoke_MemberName::set_clazz(mname(), Klass::cast(defc)->java_mirror());
+ InstanceKlass* defc = m->method_holder();
+ java_lang_invoke_MemberName::set_clazz(mname(), defc->java_mirror());
}
if (!have_name) {
//not java_lang_String::create_from_symbol; let's intern member names
--- a/hotspot/src/share/vm/prims/nativeLookup.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/prims/nativeLookup.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -165,8 +165,7 @@
// Note: It is critical for bootstrapping that Java_java_lang_ClassLoader_00024NativeLibrary_find
// gets found the first time around - otherwise an infinite loop can occure. This is
// another VM/library dependency
- Handle loader(THREAD,
- InstanceKlass::cast(method->method_holder())->class_loader());
+ Handle loader(THREAD, method->method_holder()->class_loader());
if (loader.is_null()) {
entry = lookup_special_native(jni_name);
if (entry == NULL) {
@@ -393,7 +392,7 @@
if (PrintJNIResolving) {
ResourceMark rm(THREAD);
tty->print_cr("[Dynamic-linking native method %s.%s ... JNI]",
- Klass::cast(method->method_holder())->external_name(),
+ method->method_holder()->external_name(),
method->name()->as_C_string());
}
}
--- a/hotspot/src/share/vm/runtime/compilationPolicy.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/compilationPolicy.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -627,7 +627,7 @@
// negative filter: should send NOT be inlined? returns NULL (--> inline) or rejection msg
if (m->is_abstract()) return (_msg = "abstract method");
// note: we allow ik->is_abstract()
- if (!InstanceKlass::cast(m->method_holder())->is_initialized()) return (_msg = "method holder not initialized");
+ if (!m->method_holder()->is_initialized()) return (_msg = "method holder not initialized");
if (m->is_native()) return (_msg = "native method");
nmethod* m_code = m->code();
if (m_code != NULL && m_code->code_size() > InlineSmallCode)
--- a/hotspot/src/share/vm/runtime/deoptimization.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/deoptimization.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -1191,12 +1191,12 @@
if (!constant_pool->tag_at(index).is_symbol()) return;
- Handle class_loader (THREAD, InstanceKlass::cast(constant_pool->pool_holder())->class_loader());
+ Handle class_loader (THREAD, constant_pool->pool_holder()->class_loader());
Symbol* symbol = constant_pool->symbol_at(index);
// class name?
if (symbol->byte_at(0) != '(') {
- Handle protection_domain (THREAD, Klass::cast(constant_pool->pool_holder())->protection_domain());
+ Handle protection_domain (THREAD, constant_pool->pool_holder()->protection_domain());
SystemDictionary::resolve_or_null(symbol, class_loader, protection_domain, CHECK);
return;
}
@@ -1206,7 +1206,7 @@
for (SignatureStream ss(symbol); !ss.is_done(); ss.next()) {
if (ss.is_object()) {
Symbol* class_name = ss.as_symbol(CHECK);
- Handle protection_domain (THREAD, Klass::cast(constant_pool->pool_holder())->protection_domain());
+ Handle protection_domain (THREAD, constant_pool->pool_holder()->protection_domain());
SystemDictionary::resolve_or_null(class_name, class_loader, protection_domain, CHECK);
}
}
--- a/hotspot/src/share/vm/runtime/fieldDescriptor.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/fieldDescriptor.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -36,7 +36,7 @@
oop fieldDescriptor::loader() const {
- return InstanceKlass::cast(_cp->pool_holder())->class_loader();
+ return _cp->pool_holder()->class_loader();
}
Symbol* fieldDescriptor::generic_signature() const {
@@ -45,7 +45,7 @@
}
int idx = 0;
- InstanceKlass* ik = InstanceKlass::cast(field_holder());
+ InstanceKlass* ik = field_holder();
for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
if (idx == _index) {
return fs.generic_signature();
@@ -58,7 +58,7 @@
}
AnnotationArray* fieldDescriptor::annotations() const {
- InstanceKlass* ik = InstanceKlass::cast(field_holder());
+ InstanceKlass* ik = field_holder();
Array<AnnotationArray*>* md = ik->fields_annotations();
if (md == NULL)
return NULL;
--- a/hotspot/src/share/vm/runtime/fieldDescriptor.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/fieldDescriptor.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -43,12 +43,12 @@
// update the access_flags for the field in the klass
void update_klass_field_access_flag() {
- InstanceKlass* ik = InstanceKlass::cast(field_holder());
+ InstanceKlass* ik = field_holder();
ik->field(index())->set_access_flags(_access_flags.as_short());
}
FieldInfo* field() const {
- InstanceKlass* ik = InstanceKlass::cast(field_holder());
+ InstanceKlass* ik = field_holder();
return ik->field(_index);
}
@@ -59,46 +59,46 @@
Symbol* signature() const {
return field()->signature(_cp);
}
- Klass* field_holder() const { return _cp->pool_holder(); }
- ConstantPool* constants() const { return _cp(); }
- AccessFlags access_flags() const { return _access_flags; }
- oop loader() const;
+ InstanceKlass* field_holder() const { return _cp->pool_holder(); }
+ ConstantPool* constants() const { return _cp(); }
+ AccessFlags access_flags() const { return _access_flags; }
+ oop loader() const;
// Offset (in words) of field from start of instanceOop / Klass*
- int offset() const { return field()->offset(); }
- Symbol* generic_signature() const;
- int index() const { return _index; }
- AnnotationArray* annotations() const;
+ int offset() const { return field()->offset(); }
+ Symbol* generic_signature() const;
+ int index() const { return _index; }
+ AnnotationArray* annotations() const;
// Initial field value
- bool has_initial_value() const { return field()->initval_index() != 0; }
- int initial_value_index() const { return field()->initval_index(); }
+ bool has_initial_value() const { return field()->initval_index() != 0; }
+ int initial_value_index() const { return field()->initval_index(); }
constantTag initial_value_tag() const; // The tag will return true on one of is_int(), is_long(), is_single(), is_double()
- jint int_initial_value() const;
- jlong long_initial_value() const;
- jfloat float_initial_value() const;
- jdouble double_initial_value() const;
- oop string_initial_value(TRAPS) const;
+ jint int_initial_value() const;
+ jlong long_initial_value() const;
+ jfloat float_initial_value() const;
+ jdouble double_initial_value() const;
+ oop string_initial_value(TRAPS) const;
// Field signature type
- BasicType field_type() const { return FieldType::basic_type(signature()); }
+ BasicType field_type() const { return FieldType::basic_type(signature()); }
// Access flags
- bool is_public() const { return access_flags().is_public(); }
- bool is_private() const { return access_flags().is_private(); }
- bool is_protected() const { return access_flags().is_protected(); }
- bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
+ bool is_public() const { return access_flags().is_public(); }
+ bool is_private() const { return access_flags().is_private(); }
+ bool is_protected() const { return access_flags().is_protected(); }
+ bool is_package_private() const { return !is_public() && !is_private() && !is_protected(); }
- bool is_static() const { return access_flags().is_static(); }
- bool is_final() const { return access_flags().is_final(); }
- bool is_volatile() const { return access_flags().is_volatile(); }
- bool is_transient() const { return access_flags().is_transient(); }
+ bool is_static() const { return access_flags().is_static(); }
+ bool is_final() const { return access_flags().is_final(); }
+ bool is_volatile() const { return access_flags().is_volatile(); }
+ bool is_transient() const { return access_flags().is_transient(); }
- bool is_synthetic() const { return access_flags().is_synthetic(); }
+ bool is_synthetic() const { return access_flags().is_synthetic(); }
- bool is_field_access_watched() const { return access_flags().is_field_access_watched(); }
+ bool is_field_access_watched() const { return access_flags().is_field_access_watched(); }
bool is_field_modification_watched() const
- { return access_flags().is_field_modification_watched(); }
- bool has_generic_signature() const { return access_flags().field_has_generic_signature(); }
+ { return access_flags().is_field_modification_watched(); }
+ bool has_generic_signature() const { return access_flags().field_has_generic_signature(); }
void set_is_field_access_watched(const bool value) {
_access_flags.set_is_field_access_watched(value);
--- a/hotspot/src/share/vm/runtime/globals.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/globals.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -3596,6 +3596,15 @@
product(uintx, StringTableSize, 1009, \
"Number of buckets in the interned String table") \
\
+ develop(bool, TraceDefaultMethods, false, \
+ "Trace the default method processing steps") \
+ \
+ develop(bool, ParseAllGenericSignatures, false, \
+ "Parse all generic signatures while classloading") \
+ \
+ develop(bool, VerifyGenericSignatures, false, \
+ "Abort VM on erroneous or inconsistent generic signatures") \
+ \
product(bool, UseVMInterruptibleIO, false, \
"(Unstable, Solaris-specific) Thread interrupt before or with " \
"EINTR for I/O operations results in OS_INTRPT. The default value"\
--- a/hotspot/src/share/vm/runtime/javaCalls.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/javaCalls.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -189,7 +189,7 @@
assert(method->name() == vmSymbols::object_initializer_name(), "Should only be called for default constructor");
assert(method->signature() == vmSymbols::void_method_signature(), "Should only be called for default constructor");
- InstanceKlass* ik = InstanceKlass::cast(method->method_holder());
+ InstanceKlass* ik = method->method_holder();
if (ik->is_initialized() && ik->has_vanilla_constructor()) {
// safe to skip constructor call
} else {
@@ -344,11 +344,11 @@
#ifdef ASSERT
- { Klass* holder = method->method_holder();
+ { InstanceKlass* holder = method->method_holder();
// A klass might not be initialized since JavaCall's might be used during the executing of
// the <clinit>. For example, a Thread.start might start executing on an object that is
// not fully initialized! (bad Java programming style)
- assert(InstanceKlass::cast(holder)->is_linked(), "rewritting must have taken place");
+ assert(holder->is_linked(), "rewriting must have taken place");
}
#endif
--- a/hotspot/src/share/vm/runtime/mutexLocker.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/mutexLocker.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -140,6 +140,7 @@
Monitor* JfrMsg_lock = NULL;
Mutex* JfrBuffer_lock = NULL;
Mutex* JfrStream_lock = NULL;
+Monitor* PeriodicTask_lock = NULL;
#define MAX_NUM_MUTEX 128
static Monitor * _mutex_array[MAX_NUM_MUTEX];
@@ -285,6 +286,7 @@
def(JfrMsg_lock , Monitor, nonleaf+2, true);
def(JfrBuffer_lock , Mutex, nonleaf+3, true);
def(JfrStream_lock , Mutex, nonleaf+4, true);
+ def(PeriodicTask_lock , Monitor, nonleaf+5, true);
}
GCMutexLocker::GCMutexLocker(Monitor * mutex) {
--- a/hotspot/src/share/vm/runtime/mutexLocker.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/mutexLocker.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -142,6 +142,7 @@
extern Monitor* JfrMsg_lock; // protects JFR messaging
extern Mutex* JfrBuffer_lock; // protects JFR buffer operations
extern Mutex* JfrStream_lock; // protects JFR stream access
+extern Monitor* PeriodicTask_lock; // protects the periodic task structure
// A MutexLocker provides mutual exclusion with respect to a given mutex
// for the scope which contains the locker. The lock is an OS lock, not
--- a/hotspot/src/share/vm/runtime/reflection.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/reflection.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -56,14 +56,14 @@
vframeStream vfst(jthread);
// skip over any frames belonging to java.lang.Class
while (!vfst.at_end() &&
- InstanceKlass::cast(vfst.method()->method_holder())->name() == vmSymbols::java_lang_Class()) {
+ vfst.method()->method_holder()->name() == vmSymbols::java_lang_Class()) {
vfst.next();
}
if (!vfst.at_end()) {
// this frame is a likely suspect
caller = vfst.method()->method_holder();
line_number = vfst.method()->line_number_from_bci(vfst.bci());
- Symbol* s = InstanceKlass::cast(vfst.method()->method_holder())->source_file_name();
+ Symbol* s = vfst.method()->method_holder()->source_file_name();
if (s != NULL) {
source_file = s->as_C_string();
}
@@ -472,6 +472,12 @@
return true;
}
+ // Also allow all accesses from
+ // java/lang/invoke/MagicLambdaImpl subclasses to succeed trivially.
+ if (current_class->is_subclass_of(SystemDictionary::lambda_MagicLambdaImpl_klass())) {
+ return true;
+ }
+
return can_relax_access_check_for(current_class, new_class, classloader_only);
}
@@ -564,6 +570,12 @@
return true;
}
+ // Also allow all accesses from
+ // java/lang/invoke/MagicLambdaImpl subclasses to succeed trivially.
+ if (current_class->is_subclass_of(SystemDictionary::lambda_MagicLambdaImpl_klass())) {
+ return true;
+ }
+
return can_relax_access_check_for(
current_class, field_class, classloader_only);
}
@@ -630,8 +642,8 @@
case T_OBJECT:
case T_ARRAY:
Symbol* name = ss->as_symbol(CHECK_NULL);
- oop loader = InstanceKlass::cast(method->method_holder())->class_loader();
- oop protection_domain = InstanceKlass::cast(method->method_holder())->protection_domain();
+ oop loader = method->method_holder()->class_loader();
+ oop protection_domain = method->method_holder()->protection_domain();
Klass* k = SystemDictionary::resolve_or_fail(
name,
Handle(THREAD, loader),
@@ -702,7 +714,7 @@
assert(!method()->is_initializer() ||
(for_constant_pool_access && method()->is_static()) ||
(method()->name() == vmSymbols::class_initializer_name()
- && Klass::cast(method()->method_holder())->is_interface() && JDK_Version::is_jdk12x_version()), "should call new_constructor instead");
+ && method()->method_holder()->is_interface() && JDK_Version::is_jdk12x_version()), "should call new_constructor instead");
instanceKlassHandle holder (THREAD, method->method_holder());
int slot = method->method_idnum();
@@ -820,7 +832,7 @@
Handle type = new_type(signature, holder, CHECK_NULL);
Handle rh = java_lang_reflect_Field::create(CHECK_NULL);
- java_lang_reflect_Field::set_clazz(rh(), Klass::cast(fd->field_holder())->java_mirror());
+ java_lang_reflect_Field::set_clazz(rh(), fd->field_holder()->java_mirror());
java_lang_reflect_Field::set_slot(rh(), fd->index());
java_lang_reflect_Field::set_name(rh(), name());
java_lang_reflect_Field::set_type(rh(), type());
@@ -888,7 +900,7 @@
method = reflected_method;
} else {
// resolve based on the receiver
- if (InstanceKlass::cast(reflected_method->method_holder())->is_interface()) {
+ if (reflected_method->method_holder()->is_interface()) {
// resolve interface call
if (ReflectionWrapResolutionErrors) {
// new default: 6531596
--- a/hotspot/src/share/vm/runtime/task.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/task.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -61,7 +61,7 @@
}
#endif
-void PeriodicTask::real_time_tick(size_t delay_time) {
+void PeriodicTask::real_time_tick(int delay_time) {
#ifndef PRODUCT
if (ProfilerCheckIntervals) {
_ticks++;
@@ -73,19 +73,39 @@
_intervalHistogram[ms]++;
}
#endif
- int orig_num_tasks = _num_tasks;
- for(int index = 0; index < _num_tasks; index++) {
- _tasks[index]->execute_if_pending(delay_time);
- if (_num_tasks < orig_num_tasks) { // task dis-enrolled itself
- index--; // re-do current slot as it has changed
- orig_num_tasks = _num_tasks;
+
+ {
+ MutexLockerEx ml(PeriodicTask_lock, Mutex::_no_safepoint_check_flag);
+ int orig_num_tasks = _num_tasks;
+
+ for(int index = 0; index < _num_tasks; index++) {
+ _tasks[index]->execute_if_pending(delay_time);
+ if (_num_tasks < orig_num_tasks) { // task dis-enrolled itself
+ index--; // re-do current slot as it has changed
+ orig_num_tasks = _num_tasks;
+ }
}
}
}
+int PeriodicTask::time_to_wait() {
+ MutexLockerEx ml(PeriodicTask_lock->owned_by_self() ?
+ NULL : PeriodicTask_lock, Mutex::_no_safepoint_check_flag);
+
+ if (_num_tasks == 0) {
+ return 0; // sleep until shutdown or a task is enrolled
+ }
+
+ int delay = _tasks[0]->time_to_next_interval();
+ for (int index = 1; index < _num_tasks; index++) {
+ delay = MIN2(delay, _tasks[index]->time_to_next_interval());
+ }
+ return delay;
+}
+
PeriodicTask::PeriodicTask(size_t interval_time) :
- _counter(0), _interval(interval_time) {
+ _counter(0), _interval((int) interval_time) {
// Sanity check the interval time
assert(_interval >= PeriodicTask::min_interval &&
_interval <= PeriodicTask::max_interval &&
@@ -94,33 +114,40 @@
}
PeriodicTask::~PeriodicTask() {
- if (is_enrolled())
- disenroll();
-}
-
-bool PeriodicTask::is_enrolled() const {
- for(int index = 0; index < _num_tasks; index++)
- if (_tasks[index] == this) return true;
- return false;
+ disenroll();
}
void PeriodicTask::enroll() {
- assert(WatcherThread::watcher_thread() == NULL, "dynamic enrollment of tasks not yet supported");
+ MutexLockerEx ml(PeriodicTask_lock->owned_by_self() ?
+ NULL : PeriodicTask_lock, Mutex::_no_safepoint_check_flag);
- if (_num_tasks == PeriodicTask::max_tasks)
+ if (_num_tasks == PeriodicTask::max_tasks) {
fatal("Overflow in PeriodicTask table");
+ }
_tasks[_num_tasks++] = this;
+
+ WatcherThread* thread = WatcherThread::watcher_thread();
+ if (thread) {
+ thread->unpark();
+ } else {
+ WatcherThread::start();
+ }
}
void PeriodicTask::disenroll() {
- assert(WatcherThread::watcher_thread() == NULL ||
- Thread::current() == WatcherThread::watcher_thread(),
- "dynamic disenrollment currently only handled from WatcherThread from within task() method");
+ MutexLockerEx ml(PeriodicTask_lock->owned_by_self() ?
+ NULL : PeriodicTask_lock, Mutex::_no_safepoint_check_flag);
int index;
- for(index = 0; index < _num_tasks && _tasks[index] != this; index++);
- if (index == _num_tasks) return;
+ for(index = 0; index < _num_tasks && _tasks[index] != this; index++)
+ ;
+
+ if (index == _num_tasks) {
+ return;
+ }
+
_num_tasks--;
+
for (; index < _num_tasks; index++) {
_tasks[index] = _tasks[index+1];
}
--- a/hotspot/src/share/vm/runtime/task.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/task.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -49,12 +49,12 @@
static int num_tasks() { return _num_tasks; }
private:
- size_t _counter;
- const size_t _interval;
+ int _counter;
+ const int _interval;
static int _num_tasks;
static PeriodicTask* _tasks[PeriodicTask::max_tasks];
- static void real_time_tick(size_t delay_time);
+ static void real_time_tick(int delay_time);
#ifndef PRODUCT
static elapsedTimer _timer; // measures time between ticks
@@ -69,51 +69,36 @@
PeriodicTask(size_t interval_time); // interval is in milliseconds of elapsed time
~PeriodicTask();
- // Tells whether is enrolled
- bool is_enrolled() const;
-
// Make the task active
- // NOTE: this may only be called before the WatcherThread has been started
+ // For dynamic enrollment at time T, the task will execute somewhere
+ // between T and T + interval_time.
void enroll();
// Make the task deactive
- // NOTE: this may only be called either while the WatcherThread is
- // inactive or by a task from within its task() method. One-shot or
- // several-shot tasks may be implemented this way.
void disenroll();
- void execute_if_pending(size_t delay_time) {
- _counter += delay_time;
- if (_counter >= _interval) {
+ void execute_if_pending(int delay_time) {
+ // make sure we don't overflow
+ jlong tmp = (jlong) _counter + (jlong) delay_time;
+
+ if (tmp >= (jlong) _interval) {
_counter = 0;
task();
+ } else {
+ _counter += delay_time;
}
}
// Returns how long (time in milliseconds) before the next time we should
// execute this task.
- size_t time_to_next_interval() const {
+ int time_to_next_interval() const {
assert(_interval > _counter, "task counter greater than interval?");
return _interval - _counter;
}
// Calculate when the next periodic task will fire.
// Called by the WatcherThread's run method.
- // This assumes that periodic tasks aren't entering the system
- // dynamically, except for during startup.
- static size_t time_to_wait() {
- if (_num_tasks == 0) {
- // Don't wait any more; shut down the thread since we don't
- // currently support dynamic enrollment.
- return 0;
- }
-
- size_t delay = _tasks[0]->time_to_next_interval();
- for (int index = 1; index < _num_tasks; index++) {
- delay = MIN2(delay, _tasks[index]->time_to_next_interval());
- }
- return delay;
- }
+ static int time_to_wait();
// The task to perform at each period
virtual void task() = 0;
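
For illustration, a minimal sketch of the dynamic-enrollment usage that the task.hpp change above enables, assuming the in-tree HotSpot build; SampleFlushTask and its 100 ms interval are invented for this example and are not part of the change:

    #include "runtime/task.hpp"

    class SampleFlushTask : public PeriodicTask {
     public:
      SampleFlushTask() : PeriodicTask(100) {}   // interval in milliseconds
      virtual void task() {
        // periodic work; runs on the WatcherThread
      }
    };

    // With this change a task may be enrolled at any point after VM startup:
    //   SampleFlushTask* flusher = new SampleFlushTask();
    //   flusher->enroll();      // fires within one interval of enrollment
    //   ...
    //   flusher->disenroll();   // dynamic disenrollment is supported too;
    //                           // the destructor also disenrolls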
--- a/hotspot/src/share/vm/runtime/thread.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/thread.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -1217,6 +1217,7 @@
// timer interrupts exists on the platform.
WatcherThread* WatcherThread::_watcher_thread = NULL;
+bool WatcherThread::_startable = false;
volatile bool WatcherThread::_should_terminate = false;
WatcherThread::WatcherThread() : Thread() {
@@ -1237,6 +1238,55 @@
}
}
+int WatcherThread::sleep() const {
+ MutexLockerEx ml(PeriodicTask_lock, Mutex::_no_safepoint_check_flag);
+
+ // remaining will be zero if there are no tasks,
+ // causing the WatcherThread to sleep until a task is
+ // enrolled
+ int remaining = PeriodicTask::time_to_wait();
+ int time_slept = 0;
+
+ // we expect this to time out - we only ever get unparked when
+ // we should terminate or when a new task has been enrolled
+ OSThreadWaitState osts(this->osthread(), false /* not Object.wait() */);
+
+ jlong time_before_loop = os::javaTimeNanos();
+
+ for (;;) {
+ bool timedout = PeriodicTask_lock->wait(Mutex::_no_safepoint_check_flag, remaining);
+ jlong now = os::javaTimeNanos();
+
+ if (remaining == 0) {
+ // if we didn't have any tasks we could have waited for a long time
+ // treat time_slept as zero and reset time_before_loop
+ time_slept = 0;
+ time_before_loop = now;
+ } else {
+ // need to recalculate since we might have new tasks in _tasks
+ time_slept = (int) ((now - time_before_loop) / 1000000);
+ }
+
+ // Change to task list or spurious wakeup of some kind
+ if (timedout || _should_terminate) {
+ break;
+ }
+
+ remaining = PeriodicTask::time_to_wait();
+ if (remaining == 0) {
+ // Last task was just disenrolled so loop around and wait until
+ // another task gets enrolled
+ continue;
+ }
+
+ remaining -= time_slept;
+ if (remaining <= 0)
+ break;
+ }
+
+ return time_slept;
+}
+
void WatcherThread::run() {
assert(this == watcher_thread(), "just checking");
@@ -1249,26 +1299,7 @@
// Calculate how long it'll be until the next PeriodicTask work
// should be done, and sleep that amount of time.
- size_t time_to_wait = PeriodicTask::time_to_wait();
-
- // we expect this to timeout - we only ever get unparked when
- // we should terminate
- {
- OSThreadWaitState osts(this->osthread(), false /* not Object.wait() */);
-
- jlong prev_time = os::javaTimeNanos();
- for (;;) {
- int res= _SleepEvent->park(time_to_wait);
- if (res == OS_TIMEOUT || _should_terminate)
- break;
- // spurious wakeup of some kind
- jlong now = os::javaTimeNanos();
- time_to_wait -= (now - prev_time) / 1000000;
- if (time_to_wait <= 0)
- break;
- prev_time = now;
- }
- }
+ int time_waited = sleep();
if (is_error_reported()) {
// A fatal error has happened, the error handler(VMError::report_and_die)
@@ -1298,13 +1329,7 @@
}
}
- PeriodicTask::real_time_tick(time_to_wait);
-
- // If we have no more tasks left due to dynamic disenrollment,
- // shut down the thread since we don't currently support dynamic enrollment
- if (PeriodicTask::num_tasks() == 0) {
- _should_terminate = true;
- }
+ PeriodicTask::real_time_tick(time_waited);
}
// Signal that it is terminated
@@ -1319,22 +1344,33 @@
}
void WatcherThread::start() {
- if (watcher_thread() == NULL) {
+ assert(PeriodicTask_lock->owned_by_self(), "PeriodicTask_lock required");
+
+ if (watcher_thread() == NULL && _startable) {
_should_terminate = false;
// Create the single instance of WatcherThread
new WatcherThread();
}
}
+void WatcherThread::make_startable() {
+ assert(PeriodicTask_lock->owned_by_self(), "PeriodicTask_lock required");
+ _startable = true;
+}
+
void WatcherThread::stop() {
+ {
+ MutexLockerEx ml(PeriodicTask_lock, Mutex::_no_safepoint_check_flag);
+ _should_terminate = true;
+ OrderAccess::fence(); // ensure WatcherThread sees update in main loop
+
+ WatcherThread* watcher = watcher_thread();
+ if (watcher != NULL)
+ watcher->unpark();
+ }
+
// it is ok to take late safepoints here, if needed
MutexLocker mu(Terminator_lock);
- _should_terminate = true;
- OrderAccess::fence(); // ensure WatcherThread sees update in main loop
-
- Thread* watcher = watcher_thread();
- if (watcher != NULL)
- watcher->_SleepEvent->unpark();
while(watcher_thread() != NULL) {
// This wait should make safepoint checks, wait without a timeout,
@@ -1352,6 +1388,11 @@
}
}
+void WatcherThread::unpark() {
+ MutexLockerEx ml(PeriodicTask_lock->owned_by_self() ? NULL : PeriodicTask_lock, Mutex::_no_safepoint_check_flag);
+ PeriodicTask_lock->notify();
+}
+
void WatcherThread::print_on(outputStream* st) const {
st->print("\"%s\" ", name());
Thread::print_on(st);
@@ -3658,12 +3699,18 @@
}
}
- // Start up the WatcherThread if there are any periodic tasks
- // NOTE: All PeriodicTasks should be registered by now. If they
- // aren't, late joiners might appear to start slowly (we might
- // take a while to process their first tick).
- if (PeriodicTask::num_tasks() > 0) {
- WatcherThread::start();
+ {
+ MutexLockerEx ml(PeriodicTask_lock, Mutex::_no_safepoint_check_flag);
+ // Make sure the watcher thread can be started by WatcherThread::start()
+ // or by dynamic enrollment.
+ WatcherThread::make_startable();
+ // Start up the WatcherThread if there are any periodic tasks
+ // NOTE: All PeriodicTasks should be registered by now. If they
+ // aren't, late joiners might appear to start slowly (we might
+ // take a while to process their first tick).
+ if (PeriodicTask::num_tasks() > 0) {
+ WatcherThread::start();
+ }
}
// Give os specific code one last chance to start
--- a/hotspot/src/share/vm/runtime/thread.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/thread.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -722,6 +722,7 @@
private:
static WatcherThread* _watcher_thread;
+ static bool _startable;
volatile static bool _should_terminate; // updated without holding lock
public:
enum SomeConstants {
@@ -738,6 +739,7 @@
char* name() const { return (char*)"VM Periodic Task Thread"; }
void print_on(outputStream* st) const;
void print() const { print_on(tty); }
+ void unpark();
// Returns the single instance of WatcherThread
static WatcherThread* watcher_thread() { return _watcher_thread; }
@@ -745,6 +747,12 @@
// Create and start the single instance of WatcherThread, or stop it on shutdown
static void start();
static void stop();
+ // Only allow start once the VM is sufficiently initialized
+ // Otherwise the first task to enroll will trigger the start
+ static void make_startable();
+
+ private:
+ int sleep() const;
};
--- a/hotspot/src/share/vm/runtime/vframe.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/vframe.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -161,7 +161,7 @@
// If this is the first frame, and java.lang.Object.wait(...) then print out the receiver.
if (frame_count == 0) {
if (method()->name() == vmSymbols::wait_name() &&
- InstanceKlass::cast(method()->method_holder())->name() == vmSymbols::java_lang_Object()) {
+ method()->method_holder()->name() == vmSymbols::java_lang_Object()) {
StackValueCollection* locs = locals();
if (!locs->is_empty()) {
StackValue* sv = locs->at(0);
@@ -407,7 +407,7 @@
if (Universe::reflect_invoke_cache()->is_same_method(method())) {
// This is Method.invoke() -- skip it
} else if (use_new_reflection &&
- Klass::cast(method()->method_holder())
+ method()->method_holder()
->is_subclass_of(SystemDictionary::reflect_MethodAccessorImpl_klass())) {
// This is an auxilary frame -- skip it
} else if (method()->is_method_handle_intrinsic() ||
@@ -471,8 +471,8 @@
void vframeStreamCommon::skip_reflection_related_frames() {
while (!at_end() &&
(JDK_Version::is_gte_jdk14x_version() && UseNewReflection &&
- (Klass::cast(method()->method_holder())->is_subclass_of(SystemDictionary::reflect_MethodAccessorImpl_klass()) ||
- Klass::cast(method()->method_holder())->is_subclass_of(SystemDictionary::reflect_ConstructorAccessorImpl_klass())))) {
+ (method()->method_holder()->is_subclass_of(SystemDictionary::reflect_MethodAccessorImpl_klass()) ||
+ method()->method_holder()->is_subclass_of(SystemDictionary::reflect_ConstructorAccessorImpl_klass())))) {
next();
}
}
@@ -547,13 +547,13 @@
void javaVFrame::print_value() const {
Method* m = method();
- Klass* k = m->method_holder();
+ InstanceKlass* k = m->method_holder();
tty->print_cr("frame( sp=" INTPTR_FORMAT ", unextended_sp=" INTPTR_FORMAT ", fp=" INTPTR_FORMAT ", pc=" INTPTR_FORMAT ")",
_fr.sp(), _fr.unextended_sp(), _fr.fp(), _fr.pc());
tty->print("%s.%s", Klass::cast(k)->internal_name(), m->name()->as_C_string());
if (!m->is_native()) {
- Symbol* source_name = InstanceKlass::cast(k)->source_file_name();
+ Symbol* source_name = k->source_file_name();
int line_number = m->line_number_from_bci(bci());
if (source_name != NULL && (line_number != -1)) {
tty->print("(%s:%d)", source_name->as_C_string(), line_number);
--- a/hotspot/src/share/vm/runtime/vmStructs.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/runtime/vmStructs.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -289,7 +289,7 @@
nonstatic_field(CompiledICHolder, _holder_klass, Klass*) \
nonstatic_field(ConstantPool, _tags, Array<u1>*) \
nonstatic_field(ConstantPool, _cache, ConstantPoolCache*) \
- nonstatic_field(ConstantPool, _pool_holder, Klass*) \
+ nonstatic_field(ConstantPool, _pool_holder, InstanceKlass*) \
nonstatic_field(ConstantPool, _operands, Array<u2>*) \
nonstatic_field(ConstantPool, _length, int) \
nonstatic_field(ConstantPool, _resolved_references, jobject) \
--- a/hotspot/src/share/vm/services/heapDumper.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/services/heapDumper.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -1117,8 +1117,8 @@
writer->write_symbolID(m->name()); // method's name
writer->write_symbolID(m->signature()); // method's signature
- assert(Klass::cast(m->method_holder())->oop_is_instance(), "not InstanceKlass");
- writer->write_symbolID(InstanceKlass::cast(m->method_holder())->source_file_name()); // source file name
+ assert(m->method_holder()->oop_is_instance(), "not InstanceKlass");
+ writer->write_symbolID(m->method_holder()->source_file_name()); // source file name
writer->write_u4(class_serial_num); // class serial number
writer->write_u4((u4) line_number); // line number
}
--- a/hotspot/src/share/vm/services/memPtr.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/services/memPtr.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -311,6 +311,17 @@
inline bool contains_address(address add) const {
return (addr() <= add && addr() + size() > add);
}
+
+ // returns true if this memory region overlaps the other region
+ inline bool overlaps_region(const MemPointerRecord* other) const {
+ assert(other != NULL, "Just check");
+ assert(size() > 0 && other->size() > 0, "empty range");
+ return contains_address(other->addr()) ||
+ contains_address(other->addr() + other->size() - 1) || // exclude end address
+ other->contains_address(addr()) ||
+ other->contains_address(addr() + size() - 1); // exclude end address
+ }
+
};
// MemPointerRecordEx also records callsite pc, from where
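
A standalone sketch of the overlap test added above, using plain integer ranges to show why the end address is excluded (each region is a half-open interval); the Range struct below is invented for illustration and is not HotSpot code:

    #include <cassert>
    #include <cstddef>

    struct Range {
      size_t addr;
      size_t size;
      bool contains(size_t a) const { return addr <= a && addr + size > a; }
      bool overlaps(const Range& other) const {
        return contains(other.addr) ||
               contains(other.addr + other.size - 1) ||   // other's last byte
               other.contains(addr) ||
               other.contains(addr + size - 1);           // this range's last byte
      }
    };

    int main() {
      Range a = {0x1000, 0x100};   // [0x1000, 0x1100)
      Range b = {0x1100, 0x100};   // starts exactly where a ends
      Range c = {0x10ff, 0x010};   // covers a's last byte
      assert(!a.overlaps(b));      // touching ranges do not overlap
      assert(a.overlaps(c));
      return 0;
    }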
--- a/hotspot/src/share/vm/services/memSnapshot.cpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/services/memSnapshot.cpp Fri Nov 09 08:20:03 2012 -0800
@@ -31,6 +31,69 @@
#include "services/memSnapshot.hpp"
#include "services/memTracker.hpp"
+#ifdef ASSERT
+
+void decode_pointer_record(MemPointerRecord* rec) {
+ tty->print("Pointer: [" PTR_FORMAT " - " PTR_FORMAT "] size = %d bytes", rec->addr(),
+ rec->addr() + rec->size(), (int)rec->size());
+ tty->print(" type = %s", MemBaseline::type2name(FLAGS_TO_MEMORY_TYPE(rec->flags())));
+ if (rec->is_vm_pointer()) {
+ if (rec->is_allocation_record()) {
+ tty->print_cr(" (reserve)");
+ } else if (rec->is_commit_record()) {
+ tty->print_cr(" (commit)");
+ } else if (rec->is_uncommit_record()) {
+ tty->print_cr(" (uncommit)");
+ } else if (rec->is_deallocation_record()) {
+ tty->print_cr(" (release)");
+ } else {
+ tty->print_cr(" (tag)");
+ }
+ } else {
+ if (rec->is_arena_size_record()) {
+ tty->print_cr(" (arena size)");
+ } else if (rec->is_allocation_record()) {
+ tty->print_cr(" (malloc)");
+ } else {
+ tty->print_cr(" (free)");
+ }
+ }
+ if (MemTracker::track_callsite()) {
+ char buf[1024];
+ address pc = ((MemPointerRecordEx*)rec)->pc();
+ if (pc != NULL && os::dll_address_to_function_name(pc, buf, sizeof(buf), NULL)) {
+ tty->print_cr("\tfrom %s", buf);
+ } else {
+ tty->print_cr("\tcould not decode pc = " PTR_FORMAT "", pc);
+ }
+ }
+}
+
+void decode_vm_region_record(VMMemRegion* rec) {
+ tty->print("VM Region [" PTR_FORMAT " - " PTR_FORMAT "]", rec->addr(),
+ rec->addr() + rec->size());
+ tty->print(" type = %s", MemBaseline::type2name(FLAGS_TO_MEMORY_TYPE(rec->flags())));
+ if (rec->is_allocation_record()) {
+ tty->print_cr(" (reserved)");
+ } else if (rec->is_commit_record()) {
+ tty->print_cr(" (committed)");
+ } else {
+ ShouldNotReachHere();
+ }
+ if (MemTracker::track_callsite()) {
+ char buf[1024];
+ address pc = ((VMMemRegionEx*)rec)->pc();
+ if (pc != NULL && os::dll_address_to_function_name(pc, buf, sizeof(buf), NULL)) {
+ tty->print_cr("\tfrom %s", buf);
+ } else {
+ tty->print_cr("\tcould not decode pc = " PTR_FORMAT "", pc);
+ }
+
+ }
+}
+
+#endif
+
bool VMMemPointerIterator::insert_record(MemPointerRecord* rec) {
VMMemRegionEx new_rec;
@@ -73,52 +136,61 @@
return true;
}
assert(cur->base() > rec->addr(), "Just check: locate()");
- assert(rec->addr() + rec->size() <= cur->base(), "Can not overlap");
+ assert(!cur->overlaps_region(rec), "overlapping reserved regions");
return insert_record(rec);
}
// we do consolidate committed regions
bool VMMemPointerIterator::add_committed_region(MemPointerRecord* rec) {
assert(rec->is_commit_record(), "Sanity check");
- VMMemRegion* cur;
- cur = (VMMemRegion*)current();
- assert(cur->is_reserved_region() && cur->contains_region(rec),
+ VMMemRegion* reserved_rgn = (VMMemRegion*)current();
+ assert(reserved_rgn->is_reserved_region() && reserved_rgn->contains_region(rec),
"Sanity check");
// thread's native stack is always marked as "committed", ignore
// the "commit" operation for creating stack guard pages
- if (FLAGS_TO_MEMORY_TYPE(cur->flags()) == mtThreadStack &&
+ if (FLAGS_TO_MEMORY_TYPE(reserved_rgn->flags()) == mtThreadStack &&
FLAGS_TO_MEMORY_TYPE(rec->flags()) != mtThreadStack) {
return true;
}
- cur = (VMMemRegion*)next();
- while (cur != NULL && cur->is_committed_region()) {
+ // walk the committed regions (if any) already recorded inside this reserved region
+ VMMemRegion* committed_rgn = (VMMemRegion*)next();
+ while (committed_rgn != NULL && committed_rgn->is_committed_region()) {
// duplicated commit records
- if(cur->contains_region(rec)) {
+ if(committed_rgn->contains_region(rec)) {
return true;
- }
- if (cur->base() > rec->addr()) {
- // committed regions can not overlap
- assert(rec->addr() + rec->size() <= cur->base(), "Can not overlap");
- if (rec->addr() + rec->size() == cur->base()) {
- cur->expand_region(rec->addr(), rec->size());
- return true;
+ } else if (committed_rgn->overlaps_region(rec)) {
+ // overlaps front part
+ if (rec->addr() < committed_rgn->addr()) {
+ committed_rgn->expand_region(rec->addr(),
+ committed_rgn->addr() - rec->addr());
} else {
- return insert_record(rec);
+ // overlaps tail part
+ address committed_rgn_end = committed_rgn->addr() +
+ committed_rgn->size();
+ assert(committed_rgn_end < rec->addr() + rec->size(),
+ "overlap tail part");
+ committed_rgn->expand_region(committed_rgn_end,
+ (rec->addr() + rec->size()) - committed_rgn_end);
}
- } else if (cur->base() + cur->size() == rec->addr()) {
- cur->expand_region(rec->addr(), rec->size());
+ } else if (committed_rgn->base() + committed_rgn->size() == rec->addr()) {
+ // the regions are adjacent; merge them
+ committed_rgn->expand_region(rec->addr(), rec->size());
VMMemRegion* next_reg = (VMMemRegion*)next();
// see if we can consolidate next committed region
if (next_reg != NULL && next_reg->is_committed_region() &&
- next_reg->base() == cur->base() + cur->size()) {
- cur->expand_region(next_reg->base(), next_reg->size());
+ next_reg->base() == committed_rgn->base() + committed_rgn->size()) {
+ committed_rgn->expand_region(next_reg->base(), next_reg->size());
+ // delete merged region
remove();
}
return true;
+ } else if (committed_rgn->base() > rec->addr()) {
+ // found the location, insert this committed region
+ return insert_record(rec);
}
- cur = (VMMemRegion*)next();
+ committed_rgn = (VMMemRegion*)next();
}
return insert_record(rec);
}
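
The rewritten add_committed_region above folds a new commit record into an existing committed region in three cases: overlap at the front, overlap at the tail, and exact adjacency. A small standalone sketch of that interval arithmetic follows; Region and expand are invented for illustration (HotSpot's expand_region is only ever handed the non-overlapping part):

    #include <cassert>
    #include <cstddef>

    struct Region {
      size_t base;
      size_t size;
      size_t end() const { return base + size; }
      // grow the region so that it also covers [addr, addr + sz)
      void expand(size_t addr, size_t sz) {
        size_t new_base = addr < base ? addr : base;
        size_t new_end  = (addr + sz) > end() ? (addr + sz) : end();
        base = new_base;
        size = new_end - new_base;
      }
    };

    int main() {
      Region committed = {0x2000, 0x1000};        // [0x2000, 0x3000)
      committed.expand(0x1800, 0x0800);           // overlaps the front
      assert(committed.base == 0x1800);
      committed.expand(0x2f00, 0x0400);           // overlaps the tail
      assert(committed.end() == 0x3300);
      committed.expand(committed.end(), 0x0100);  // exactly adjacent
      assert(committed.end() == 0x3400);
      return 0;
    }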
--- a/hotspot/src/share/vm/utilities/growableArray.hpp Thu Nov 08 11:51:00 2012 -0800
+++ b/hotspot/src/share/vm/utilities/growableArray.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -217,7 +217,12 @@
return missed;
}
- E at(int i) const {
+ E& at(int i) {
+ assert(0 <= i && i < _len, "illegal index");
+ return _data[i];
+ }
+
+ E const& at(int i) const {
assert(0 <= i && i < _len, "illegal index");
return _data[i];
}
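
With the non-const overload above, elements can be updated in place through at() rather than read-modify-write via at_put(). A brief sketch, assuming the in-tree build; bump_all is invented for illustration:

    #include "utilities/growableArray.hpp"

    static void bump_all(GrowableArray<int>* counts) {
      for (int i = 0; i < counts->length(); i++) {
        counts->at(i)++;   // mutable reference; previously required at_put(i, ...)
      }
    }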
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/utilities/pair.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_VM_UTILITIES_PAIR_HPP
+#define SHARE_VM_UTILITIES_PAIR_HPP
+
+#include "memory/allocation.hpp"
+#include "utilities/top.hpp"
+
+template<typename T, typename V, typename ALLOC_BASE = ResourceObj>
+class Pair : public ALLOC_BASE {
+ public:
+ T first;
+ V second;
+
+ Pair() {}
+ Pair(T t, V v) : first(t), second(v) {}
+};
+
+
+#endif // SHARE_VM_UTILITIES_PAIR_HPP
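
A minimal usage sketch of the new Pair template, assuming the in-tree build; the element types and values are invented for illustration:

    #include "utilities/pair.hpp"

    static int sum_pair() {
      Pair<int, int> p(2, 3);      // stack-allocated; ResourceObj is the default base
      return p.first + p.second;   // members are public by design
    }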
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/src/share/vm/utilities/resourceHash.hpp Fri Nov 09 08:20:03 2012 -0800
@@ -0,0 +1,134 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_VM_UTILITIES_RESOURCEHASH_HPP
+#define SHARE_VM_UTILITIES_RESOURCEHASH_HPP
+
+#include "memory/allocation.hpp"
+#include "utilities/top.hpp"
+
+template<typename K> struct ResourceHashtableFns {
+ typedef unsigned (*hash_fn)(K const&);
+ typedef bool (*equals_fn)(K const&, K const&);
+};
+
+template<typename K> unsigned primitive_hash(const K& k) {
+ unsigned hash = (unsigned)((uintptr_t)k);
+ return hash ^ (hash >> 3); // just in case we're dealing with aligned ptrs
+}
+
+template<typename K> bool primitive_equals(const K& k0, const K& k1) {
+ return k0 == k1;
+}
+
+template<
+ typename K, typename V,
+ typename ResourceHashtableFns<K>::hash_fn HASH = primitive_hash<K>,
+ typename ResourceHashtableFns<K>::equals_fn EQUALS = primitive_equals<K>,
+ unsigned SIZE = 256
+ >
+class ResourceHashtable : public ResourceObj {
+ private:
+
+ class Node : public ResourceObj {
+ public:
+ unsigned _hash;
+ K _key;
+ V _value;
+ Node* _next;
+
+ Node(unsigned hash, K const& key, V const& value) :
+ _hash(hash), _key(key), _value(value), _next(NULL) {}
+ };
+
+ Node* _table[SIZE];
+
+ // Returns a pointer to the slot that holds the node for this key, or to
+ // the slot where such a node would be inserted if it is not in the table.
+ Node** lookup_node(unsigned hash, K const& key) {
+ unsigned index = hash % SIZE;
+ Node** ptr = &_table[index];
+ while (*ptr != NULL) {
+ Node* node = *ptr;
+ if (node->_hash == hash && EQUALS(key, node->_key)) {
+ break;
+ }
+ ptr = &(node->_next);
+ }
+ return ptr;
+ }
+
+ Node const** lookup_node(unsigned hash, K const& key) const {
+ return const_cast<Node const**>(
+ const_cast<ResourceHashtable*>(this)->lookup_node(hash, key));
+ }
+
+ public:
+ ResourceHashtable() { memset(_table, 0, SIZE * sizeof(Node*)); }
+
+ bool contains(K const& key) const {
+ return get(key) != NULL;
+ }
+
+ V* get(K const& key) const {
+ unsigned hv = HASH(key);
+ Node const** ptr = lookup_node(hv, key);
+ if (*ptr != NULL) {
+ return const_cast<V*>(&(*ptr)->_value);
+ } else {
+ return NULL;
+ }
+ }
+
+ // Inserts or replaces a value in the table
+ void put(K const& key, V const& value) {
+ unsigned hv = HASH(key);
+ Node** ptr = lookup_node(hv, key);
+ if (*ptr != NULL) {
+ (*ptr)->_value = value;
+ } else {
+ *ptr = new Node(hv, key, value);
+ }
+ }
+
+ // ITER contains bool do_entry(K const&, V const&), which will be
+ // called for each entry in the table. If do_entry() returns false,
+ // the iteration is cancelled.
+ template<class ITER>
+ void iterate(ITER* iter) const {
+ Node* const* bucket = _table;
+ while (bucket < &_table[SIZE]) {
+ Node* node = *bucket;
+ while (node != NULL) {
+ bool cont = iter->do_entry(node->_key, node->_value);
+ if (!cont) { return; }
+ node = node->_next;
+ }
+ ++bucket;
+ }
+ }
+};
+
+
+#endif // SHARE_VM_UTILITIES_RESOURCEHASH_HPP
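
A short usage sketch of the new ResourceHashtable, assuming the in-tree build and a current thread with a resource area; EntryCounter and hashtable_demo are invented for illustration:

    #include "memory/resourceArea.hpp"
    #include "utilities/resourceHash.hpp"

    class EntryCounter {
     public:
      int count;
      EntryCounter() : count(0) {}
      bool do_entry(int const& key, int const& value) {
        count++;
        return true;               // returning false stops the iteration early
      }
    };

    static int hashtable_demo() {
      ResourceMark rm;                      // nodes are resource-allocated
      ResourceHashtable<int, int> table;    // primitive_hash/primitive_equals, 256 buckets
      table.put(1, 100);
      table.put(1, 101);                    // put() replaces the existing value
      table.put(2, 200);

      EntryCounter counter;
      table.iterate(&counter);              // visits both entries
      int* v = table.get(1);
      return (v != NULL ? *v : -1) + counter.count;   // 101 + 2 = 103
    }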
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/compiler/8002069/Test8002069.java Fri Nov 09 08:20:03 2012 -0800
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+/**
+ * @test
+ * @bug 8002069
+ * @summary Assert failed in C2: assert(field->edge_count() > 0) failed: sanity
+ *
+ * @run main/othervm -Xmx32m -XX:+IgnoreUnrecognizedVMOptions -Xbatch -XX:CompileCommand=exclude,Test8002069.dummy Test8002069
+ */
+
+abstract class O {
+ int f;
+ public O() { f = 5; }
+ abstract void put(int i);
+ public int foo(int i) {
+ put(i);
+ return i;
+ }
+};
+
+class A extends O {
+ int[] a;
+ public A(int s) {
+ a = new int[s];
+ }
+ public void put(int i) {
+ a[i%a.length] = i;
+ }
+}
+
+class B extends O {
+ int sz;
+ int[] a;
+ public B(int s) {
+ sz = s;
+ a = new int[s];
+ }
+ public void put(int i) {
+ a[i%sz] = i;
+ }
+}
+
+public class Test8002069 {
+ public static void main(String args[]) {
+ int sum = 0;
+ for (int i=0; i<8000; i++) {
+ sum += test1(i);
+ }
+ for (int i=0; i<100000; i++) {
+ sum += test2(i);
+ }
+ System.out.println("PASSED. sum = " + sum);
+ }
+
+ private O o;
+
+ private int foo(int i) {
+ return o.foo(i);
+ }
+ static int test1(int i) {
+ Test8002069 t = new Test8002069();
+ t.o = new A(5);
+ return t.foo(i);
+ }
+ static int test2(int i) {
+ Test8002069 t = new Test8002069();
+ t.o = new B(5);
+ dummy(i);
+ return t.foo(i);
+ }
+
+ static int dummy(int i) {
+ return i*2;
+ }
+}
+