--- a/hotspot/src/cpu/zero/vm/frame_zero.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/cpu/zero/vm/frame_zero.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -98,10 +98,20 @@
#endif // CC_INTERP
void frame::patch_pc(Thread* thread, address pc) {
- // We borrow this call to set the thread pointer in the interpreter
- // state; the hook to set up deoptimized frames isn't supplied it.
- assert(pc == NULL, "should be");
- get_interpreterState()->set_thread((JavaThread *) thread);
+
+ if (pc != NULL) {
+ _cb = CodeCache::find_blob(pc);
+ SharkFrame* sharkframe = zeroframe()->as_shark_frame();
+ sharkframe->set_pc(pc);
+ _pc = pc;
+ _deopt_state = is_deoptimized;
+
+ } else {
+ // We borrow this call to set the thread pointer in the interpreter
+ // state; the hook to set up deoptimized frames isn't supplied with it.
+ assert(pc == NULL, "should be");
+ get_interpreterState()->set_thread((JavaThread *) thread);
+ }
}
bool frame::safe_for_sender(JavaThread *thread) {
--- a/hotspot/src/cpu/zero/vm/frame_zero.inline.hpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/cpu/zero/vm/frame_zero.inline.hpp Thu Jan 17 18:47:36 2013 -0800
@@ -45,27 +45,36 @@
case ZeroFrame::ENTRY_FRAME:
_pc = StubRoutines::call_stub_return_pc();
_cb = NULL;
+ _deopt_state = not_deoptimized;
break;
case ZeroFrame::INTERPRETER_FRAME:
_pc = NULL;
_cb = NULL;
+ _deopt_state = not_deoptimized;
break;
- case ZeroFrame::SHARK_FRAME:
+ case ZeroFrame::SHARK_FRAME: {
_pc = zero_sharkframe()->pc();
_cb = CodeCache::find_blob_unsafe(pc());
+ address original_pc = nmethod::get_deopt_original_pc(this);
+ if (original_pc != NULL) {
+ _pc = original_pc;
+ _deopt_state = is_deoptimized;
+ } else {
+ _deopt_state = not_deoptimized;
+ }
break;
-
+ }
case ZeroFrame::FAKE_STUB_FRAME:
_pc = NULL;
_cb = NULL;
+ _deopt_state = not_deoptimized;
break;
default:
ShouldNotReachHere();
}
- _deopt_state = not_deoptimized;
}
// Accessors
--- a/hotspot/src/cpu/zero/vm/sharkFrame_zero.hpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/cpu/zero/vm/sharkFrame_zero.hpp Thu Jan 17 18:47:36 2013 -0800
@@ -68,6 +68,10 @@
return (address) value_of_word(pc_off);
}
+ void set_pc(address pc) const {
+ *((address*) addr_of_word(pc_off)) = pc;
+ }
+
intptr_t* unextended_sp() const {
return (intptr_t *) value_of_word(unextended_sp_off);
}
--- a/hotspot/src/share/vm/c1/c1_GraphBuilder.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/c1/c1_GraphBuilder.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -3223,7 +3223,12 @@
}
if (try_inline_full(callee, holder_known, bc, receiver))
return true;
- print_inlining(callee, _inline_bailout_msg, /*success*/ false);
+
+ // The entire compilation could fail during the try_inline_full call.
+ // In that case, printing the inlining decision info is useless.
+ if (!bailed_out())
+ print_inlining(callee, _inline_bailout_msg, /*success*/ false);
+
return false;
}
@@ -3753,7 +3758,8 @@
push_scope(callee, cont);
// the BlockListBuilder for the callee could have bailed out
- CHECK_BAILOUT_(false);
+ if (bailed_out())
+ return false;
// Temporarily set up bytecode stream so we can append instructions
// (only using the bci of this stream)
@@ -3819,7 +3825,8 @@
iterate_all_blocks(callee_start_block == NULL);
// If we bailed out during parsing, return immediately (this is bad news)
- if (bailed_out()) return false;
+ if (bailed_out())
+ return false;
// iterate_all_blocks theoretically traverses in random order; in
// practice, we have only traversed the continuation if we are
@@ -3828,9 +3835,6 @@
!continuation()->is_set(BlockBegin::was_visited_flag),
"continuation should not have been parsed yet if we created it");
- // If we bailed out during parsing, return immediately (this is bad news)
- CHECK_BAILOUT_(false);
-
// At this point we are almost ready to return and resume parsing of
// the caller back in the GraphBuilder. The only thing we want to do
// first is an optimization: during parsing of the callee we
@@ -4171,7 +4175,10 @@
else
log->inline_success("receiver is statically known");
} else {
- log->inline_fail(msg);
+ if (msg != NULL)
+ log->inline_fail(msg);
+ else
+ log->inline_fail("reason unknown");
}
}
--- a/hotspot/src/share/vm/ci/ciType.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/ci/ciType.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -60,6 +60,19 @@
}
// ------------------------------------------------------------------
+// ciType::name
+//
+// Return the name of this type
+const char* ciType::name() {
+ if (is_primitive_type()) {
+ return type2name(basic_type());
+ } else {
+ assert(is_klass(), "must be");
+ return as_klass()->name()->as_utf8();
+ }
+}
+
+// ------------------------------------------------------------------
// ciType::print_impl
//
// Implementation of the print method.
@@ -73,7 +86,8 @@
//
// Print the name of this type
void ciType::print_name_on(outputStream* st) {
- st->print(type2name(basic_type()));
+ ResourceMark rm;
+ st->print(name());
}
--- a/hotspot/src/share/vm/ci/ciType.hpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/ci/ciType.hpp Thu Jan 17 18:47:36 2013 -0800
@@ -77,6 +77,7 @@
bool is_type() const { return true; }
bool is_classless() const { return is_primitive_type(); }
+ const char* name();
virtual void print_name_on(outputStream* st);
void print_name() {
print_name_on(tty);
--- a/hotspot/src/share/vm/compiler/abstractCompiler.hpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/compiler/abstractCompiler.hpp Thu Jan 17 18:47:36 2013 -0800
@@ -50,6 +50,7 @@
// Missing feature tests
virtual bool supports_native() { return true; }
virtual bool supports_osr () { return true; }
+ virtual bool can_compile_method(methodHandle method) { return true; }
#if defined(TIERED) || ( !defined(COMPILER1) && !defined(COMPILER2) && !defined(SHARK))
virtual bool is_c1 () { return false; }
virtual bool is_c2 () { return false; }
--- a/hotspot/src/share/vm/compiler/compileBroker.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/compiler/compileBroker.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -1218,7 +1218,7 @@
// lock, make sure that the compilation
// isn't prohibited in a straightforward way.
- if (compiler(comp_level) == NULL || compilation_is_prohibited(method, osr_bci, comp_level)) {
+ if (compiler(comp_level) == NULL || !compiler(comp_level)->can_compile_method(method) || compilation_is_prohibited(method, osr_bci, comp_level)) {
return NULL;
}
--- a/hotspot/src/share/vm/opto/compile.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/opto/compile.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -692,7 +692,7 @@
PhaseGVN gvn(node_arena(), estimated_size);
set_initial_gvn(&gvn);
- if (PrintInlining) {
+ if (PrintInlining || PrintIntrinsics NOT_PRODUCT( || PrintOptoInlining)) {
_print_inlining_list = new (comp_arena())GrowableArray<PrintInliningBuffer>(comp_arena(), 1, 1, PrintInliningBuffer());
}
{ // Scope for timing the parser
@@ -2049,7 +2049,7 @@
} // (End scope of igvn; run destructor if necessary for asserts.)
- dump_inlining();
+ dump_inlining();
// A method with only infinite loops has no edges entering loops from root
{
NOT_PRODUCT( TracePhase t2("graphReshape", &_t_graphReshaping, TimeCompiler); )
@@ -3497,7 +3497,7 @@
}
void Compile::dump_inlining() {
- if (PrintInlining) {
+ if (PrintInlining || PrintIntrinsics NOT_PRODUCT( || PrintOptoInlining)) {
// Print inlining message for candidates that we couldn't inline
// for lack of space or non constant receiver
for (int i = 0; i < _late_inlines.length(); i++) {
--- a/hotspot/src/share/vm/opto/doCall.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/opto/doCall.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -553,7 +553,13 @@
rtype = ctype;
}
} else {
- assert(rtype == ctype, "mismatched return types"); // symbolic resolution enforces this
+ // Symbolic resolution enforces the types to be the same.
+ // NOTE: We must relax the assert for unloaded types because two
+ // different ciType instances of the same unloaded class type
+ // can appear to be "loaded" by different loaders (depending on
+ // the accessing class).
+ assert(!rtype->is_loaded() || !ctype->is_loaded() || rtype == ctype,
+ err_msg_res("mismatched return types: rtype=%s, ctype=%s", rtype->name(), ctype->name()));
}
// If the return type of the method is not loaded, assert that the
--- a/hotspot/src/share/vm/opto/library_call.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/opto/library_call.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -3559,7 +3559,6 @@
// public static <T,U> T[] java.util.Arrays.copyOf( U[] original, int newLength, Class<? extends T[]> newType);
// public static <T,U> T[] java.util.Arrays.copyOfRange(U[] original, int from, int to, Class<? extends T[]> newType);
bool LibraryCallKit::inline_array_copyOf(bool is_copyOfRange) {
- return false;
if (too_many_traps(Deoptimization::Reason_intrinsic)) return false;
// Get the arguments.
--- a/hotspot/src/share/vm/shark/sharkBlock.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/shark/sharkBlock.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -1032,7 +1032,7 @@
check_null(value);
object = value->generic_value();
}
- if (is_get && field->is_constant()) {
+ if (is_get && field->is_constant() && field->is_static()) {
SharkConstant *constant = SharkConstant::for_field(iter());
if (constant->is_loaded())
value = constant->value(builder());
@@ -1044,10 +1044,17 @@
BasicType basic_type = field->type()->basic_type();
Type *stack_type = SharkType::to_stackType(basic_type);
Type *field_type = SharkType::to_arrayType(basic_type);
-
+ Type *type = field_type;
+ if (field->is_volatile()) {
+ if (field_type == SharkType::jfloat_type()) {
+ type = SharkType::jint_type();
+ } else if (field_type == SharkType::jdouble_type()) {
+ type = SharkType::jlong_type();
+ }
+ }
Value *addr = builder()->CreateAddressOfStructEntry(
object, in_ByteSize(field->offset_in_bytes()),
- PointerType::getUnqual(field_type),
+ PointerType::getUnqual(type),
"addr");
// Do the access
@@ -1055,6 +1062,7 @@
Value* field_value;
if (field->is_volatile()) {
field_value = builder()->CreateAtomicLoad(addr);
+ field_value = builder()->CreateBitCast(field_value, field_type);
} else {
field_value = builder()->CreateLoad(addr);
}
@@ -1074,6 +1082,7 @@
}
if (field->is_volatile()) {
+ field_value = builder()->CreateBitCast(field_value, type);
builder()->CreateAtomicStore(field_value, addr);
} else {
builder()->CreateStore(field_value, addr);
--- a/hotspot/src/share/vm/shark/sharkCompiler.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/shark/sharkCompiler.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -185,6 +185,9 @@
// Build the LLVM IR for the method
Function *function = SharkFunction::build(env, &builder, flow, name);
+ if (env->failing()) {
+ return;
+ }
// Generate native code. It's unpleasant that we have to drop into
// the VM to do this -- it blocks safepoints -- but I can't see any
--- a/hotspot/src/share/vm/shark/sharkCompiler.hpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/shark/sharkCompiler.hpp Thu Jan 17 18:47:36 2013 -0800
@@ -46,6 +46,9 @@
// Missing feature tests
bool supports_native() { return true; }
bool supports_osr() { return true; }
+ bool can_compile_method(methodHandle method) {
+ return ! (method->is_method_handle_intrinsic() || method->is_compiled_lambda_form());
+ }
// Customization
bool needs_adapters() { return false; }
--- a/hotspot/src/share/vm/shark/sharkConstant.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/shark/sharkConstant.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -37,7 +37,12 @@
ciType *type = NULL;
if (constant.basic_type() == T_OBJECT) {
ciEnv *env = ciEnv::current();
- assert(constant.as_object()->klass() == env->String_klass() || constant.as_object()->klass() == env->Class_klass(), "should be");
+
+ assert(constant.as_object()->klass() == env->String_klass()
+ || constant.as_object()->klass() == env->Class_klass()
+ || constant.as_object()->klass()->is_subtype_of(env->MethodType_klass())
+ || constant.as_object()->klass()->is_subtype_of(env->MethodHandle_klass()), "should be");
+
type = constant.as_object()->klass();
}
return new SharkConstant(constant, type);
--- a/hotspot/src/share/vm/shark/sharkFunction.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/shark/sharkFunction.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -77,6 +77,10 @@
// Walk the tree from the start block to determine which
// blocks are entered and which blocks require phis
SharkTopLevelBlock *start_block = block(flow()->start_block_num());
+ if (is_osr() && start_block->stack_depth_at_entry() != 0) {
+ env()->record_method_not_compilable("can't compile OSR block with incoming stack-depth > 0");
+ return;
+ }
assert(start_block->start() == flow()->start_bci(), "blocks out of order");
start_block->enter();
--- a/hotspot/src/share/vm/shark/sharkInliner.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/shark/sharkInliner.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -725,7 +725,7 @@
// Push the result if necessary
if (is_get) {
bool result_pushed = false;
- if (field->is_constant()) {
+ if (field->is_constant() && field->is_static()) {
SharkConstant *sc = SharkConstant::for_field(iter());
if (sc->is_loaded()) {
push(sc->is_nonzero());
--- a/hotspot/src/share/vm/shark/sharkInvariants.hpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/shark/sharkInvariants.hpp Thu Jan 17 18:47:36 2013 -0800
@@ -68,7 +68,7 @@
//
// Accessing this directly is kind of ugly, so it's private. Add
// new accessors below if you need something from it.
- private:
+ protected:
ciEnv* env() const {
assert(_env != NULL, "env not available");
return _env;
@@ -99,12 +99,14 @@
DebugInformationRecorder* debug_info() const {
return env()->debug_info();
}
+ SharkCodeBuffer* code_buffer() const {
+ return builder()->code_buffer();
+ }
+
+ public:
Dependencies* dependencies() const {
return env()->dependencies();
}
- SharkCodeBuffer* code_buffer() const {
- return builder()->code_buffer();
- }
// Commonly used classes
protected:
--- a/hotspot/src/share/vm/shark/sharkTopLevelBlock.cpp Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/src/share/vm/shark/sharkTopLevelBlock.cpp Thu Jan 17 18:47:36 2013 -0800
@@ -113,7 +113,19 @@
ciSignature* sig;
method = iter()->get_method(will_link, &sig);
assert(will_link, "typeflow responsibility");
-
+ // We can't compile calls to method handle intrinsics, because we use
+ // the interpreter entry points and they expect the top frame to be an
+ // interpreter frame. We need to implement the intrinsics for Shark.
+ if (method->is_method_handle_intrinsic() || method->is_compiled_lambda_form()) {
+ if (SharkPerformanceWarnings) {
+ warning("JSR292 optimization not yet implemented in Shark");
+ }
+ set_trap(
+ Deoptimization::make_trap_request(
+ Deoptimization::Reason_unhandled,
+ Deoptimization::Action_make_not_compilable), bci());
+ return;
+ }
if (!method->holder()->is_linked()) {
set_trap(
Deoptimization::make_trap_request(
@@ -158,6 +170,16 @@
return;
}
break;
+ case Bytecodes::_invokedynamic:
+ case Bytecodes::_invokehandle:
+ if (SharkPerformanceWarnings) {
+ warning("JSR292 optimization not yet implemented in Shark");
+ }
+ set_trap(
+ Deoptimization::make_trap_request(
+ Deoptimization::Reason_unhandled,
+ Deoptimization::Action_make_not_compilable), bci());
+ return;
}
}
@@ -1030,7 +1052,6 @@
dest_method->holder() == java_lang_Object_klass())
return dest_method;
-#ifdef SHARK_CAN_DEOPTIMIZE_ANYWHERE
// This code can replace a virtual call with a direct call if this
// class is the only one in the entire set of loaded classes that
// implements this method. This makes the compiled code dependent
@@ -1064,6 +1085,8 @@
if (monomorphic_target != NULL) {
assert(!monomorphic_target->is_abstract(), "shouldn't be");
+ function()->dependencies()->assert_unique_concrete_method(actual_receiver, monomorphic_target);
+
// Opto has a bunch of type checking here that I don't
// understand. It's to inhibit casting in one direction,
// possibly because objects in Opto can have inexact
@@ -1097,7 +1120,6 @@
// with non-monomorphic targets if the receiver has an exact
// type. We don't mark types this way, so we can't do this.
-#endif // SHARK_CAN_DEOPTIMIZE_ANYWHERE
return NULL;
}
@@ -1298,8 +1320,9 @@
// Try to inline the call
if (!call_is_virtual) {
- if (SharkInliner::attempt_inline(call_method, current_state()))
+ if (SharkInliner::attempt_inline(call_method, current_state())) {
return;
+ }
}
// Find the method we are calling
--- a/hotspot/test/compiler/7190310/Test7190310.java Thu Jan 17 01:27:02 2013 -0500
+++ b/hotspot/test/compiler/7190310/Test7190310.java Thu Jan 17 18:47:36 2013 -0800
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -23,7 +23,16 @@
*/
/*
- * Manual test
+ * @test
+ * @bug 7190310
+ * @summary Inlining WeakReference.get(), and hoisting $referent may lead to non-terminating loops
+ * @run main/othervm/timeout=600 -Xbatch Test7190310
+ */
+
+/*
+ * Note: the bug exhibits as an infinite loop, so the timeout is helpful.
+ * It should normally finish pretty quickly, but on some especially slow machines
+ * it may not. The companion _unsafe test lacks a timeout, but that is okay.
*/
import java.lang.ref.*;