8130309: Need to bailout cleanly if creation of stubs fails when codecache is out of space
Summary: Check for failed expansion of the stub section in the code buffer and bail out.
Reviewed-by: kvn, adinn, dlong, roland, twisti
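
All of the back-ends touched below follow one pattern: helpers that emit stubs (trampoline stubs, to-interpreter stubs, exception and deopt handlers) now return the address of what they emitted, with NULL meaning the stub section of the CodeBuffer could not be expanded, and each caller checks that result, records a "CodeCache is full" failure on the compilation, and returns instead of continuing to emit. The following is a minimal standalone sketch of that caller-side control flow; the type and function names (emit_to_interp_stub_sketch, the failure_reason flag) are placeholders for illustration, not the real HotSpot APIs changed in this patch.

    #include <cstdio>

    // Placeholder types/names (not HotSpot APIs): this only demonstrates the
    // "return NULL on failed stub-section expansion, then record a failure
    // and bail out" control flow used throughout the patch.
    typedef unsigned char* address;

    static bool        code_cache_full = true;   // simulate an exhausted code cache
    static const char* failure_reason  = NULL;

    // Emitter: returns the stub start address, or NULL if the stub section
    // of the code buffer could not be expanded (CodeBuffer::expand failed).
    static address emit_to_interp_stub_sketch() {
      if (code_cache_full) return NULL;
      static unsigned char stub[16];
      return stub;
    }

    // Caller: mirrors what the .ad encodings now do with the returned address.
    static void emit_call_site_sketch() {
      address stub = emit_to_interp_stub_sketch();
      if (stub == NULL) {
        failure_reason = "CodeCache is full";    // record_failure(...) in the real code
        return;                                  // bail out cleanly instead of crashing later
      }
      // ... would continue emitting the call here ...
    }

    int main() {
      emit_call_site_sketch();
      if (failure_reason != NULL) {
        printf("compilation bailed out: %s\n", failure_reason);
      }
      return 0;
    }
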
--- a/hotspot/src/cpu/aarch64/vm/aarch64.ad Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/aarch64/vm/aarch64.ad Wed Jul 29 08:05:21 2015 +0200
@@ -2389,9 +2389,11 @@
// Note that the code buffer's insts_mark is always relative to insts.
// That's why we must use the macroassembler to generate a handler.
MacroAssembler _masm(&cbuf);
- address base =
- __ start_a_stub(size_exception_handler());
- if (base == NULL) return 0; // CodeBuffer::expand failed
+ address base = __ start_a_stub(size_exception_handler());
+ if (base == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return 0; // CodeBuffer::expand failed
+ }
int offset = __ offset();
__ far_jump(RuntimeAddress(OptoRuntime::exception_blob()->entry_point()));
assert(__ offset() - offset <= (int) size_exception_handler(), "overflow");
@@ -2405,9 +2407,11 @@
// Note that the code buffer's insts_mark is always relative to insts.
// That's why we must use the macroassembler to generate a handler.
MacroAssembler _masm(&cbuf);
- address base =
- __ start_a_stub(size_deopt_handler());
- if (base == NULL) return 0; // CodeBuffer::expand failed
+ address base = __ start_a_stub(size_deopt_handler());
+ if (base == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return 0; // CodeBuffer::expand failed
+ }
int offset = __ offset();
__ adr(lr, __ pc());
@@ -3657,24 +3661,37 @@
MacroAssembler _masm(&cbuf);
address addr = (address)$meth$$method;
+ address call;
if (!_method) {
// A call to a runtime wrapper, e.g. new, new_typeArray_Java, uncommon_trap.
- __ trampoline_call(Address(addr, relocInfo::runtime_call_type), &cbuf);
+ call = __ trampoline_call(Address(addr, relocInfo::runtime_call_type), &cbuf);
} else if (_optimized_virtual) {
- __ trampoline_call(Address(addr, relocInfo::opt_virtual_call_type), &cbuf);
+ call = __ trampoline_call(Address(addr, relocInfo::opt_virtual_call_type), &cbuf);
} else {
- __ trampoline_call(Address(addr, relocInfo::static_call_type), &cbuf);
+ call = __ trampoline_call(Address(addr, relocInfo::static_call_type), &cbuf);
+ }
+ if (call == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return;
}
if (_method) {
// Emit stub for static call
- CompiledStaticCall::emit_to_interp_stub(cbuf);
+ address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
+ if (stub == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return;
+ }
}
%}
enc_class aarch64_enc_java_dynamic_call(method meth) %{
MacroAssembler _masm(&cbuf);
- __ ic_call((address)$meth$$method);
+ address call = __ ic_call((address)$meth$$method);
+ if (call == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return;
+ }
%}
enc_class aarch64_enc_call_epilog() %{
@@ -3695,7 +3712,11 @@
address entry = (address)$meth$$method;
CodeBlob *cb = CodeCache::find_blob(entry);
if (cb) {
- __ trampoline_call(Address(entry, relocInfo::runtime_call_type));
+ address call = __ trampoline_call(Address(entry, relocInfo::runtime_call_type));
+ if (call == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return;
+ }
} else {
int gpcnt;
int fpcnt;
--- a/hotspot/src/cpu/aarch64/vm/c1_CodeStubs_aarch64.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/aarch64/vm/c1_CodeStubs_aarch64.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -327,9 +327,16 @@
ce->align_call(lir_static_call);
ce->emit_static_call_stub();
+ if (ce->compilation()->bailed_out()) {
+ return; // CodeCache is full
+ }
Address resolve(SharedRuntime::get_resolve_static_call_stub(),
relocInfo::static_call_type);
- __ trampoline_call(resolve);
+ address call = __ trampoline_call(resolve);
+ if (call == NULL) {
+ bailout("trampoline stub overflow");
+ return;
+ }
ce->add_call_info_here(info());
#ifndef PRODUCT
--- a/hotspot/src/cpu/aarch64/vm/c1_LIRAssembler_aarch64.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/aarch64/vm/c1_LIRAssembler_aarch64.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -1996,13 +1996,21 @@
void LIR_Assembler::call(LIR_OpJavaCall* op, relocInfo::relocType rtype) {
- __ trampoline_call(Address(op->addr(), rtype));
+ address call = __ trampoline_call(Address(op->addr(), rtype));
+ if (call == NULL) {
+ bailout("trampoline stub overflow");
+ return;
+ }
add_call_info(code_offset(), op->info());
}
void LIR_Assembler::ic_call(LIR_OpJavaCall* op) {
- __ ic_call(op->addr());
+ address call = __ ic_call(op->addr());
+ if (call == NULL) {
+ bailout("trampoline stub overflow");
+ return;
+ }
add_call_info(code_offset(), op->info());
}
--- a/hotspot/src/cpu/aarch64/vm/compiledIC_aarch64.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/aarch64/vm/compiledIC_aarch64.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -51,7 +51,7 @@
// ----------------------------------------------------------------------------
#define __ _masm.
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
+address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
// Stub is fixed up when the corresponding call is converted from
// calling compiled code to calling interpreted code.
// mov rmethod, 0
@@ -63,10 +63,11 @@
// That's why we must use the macroassembler to generate a stub.
MacroAssembler _masm(&cbuf);
- address base = __ start_a_stub(to_interp_stub_size()*2);
-
+ address base = __ start_a_stub(to_interp_stub_size());
int offset = __ offset();
- if (base == NULL) return; // CodeBuffer::expand failed
+ if (base == NULL) {
+ return NULL; // CodeBuffer::expand failed
+ }
// static stub relocation stores the instruction address of the call
__ relocate(static_stub_Relocation::spec(mark));
// static stub relocation also tags the Method* in the code-stream.
@@ -76,6 +77,7 @@
assert((__ offset() - offset) <= (int)to_interp_stub_size(), "stub too big");
__ end_a_stub();
+ return base;
}
#undef __
--- a/hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -664,7 +664,7 @@
// Maybe emit a call via a trampoline. If the code cache is small
// trampolines won't be emitted.
-void MacroAssembler::trampoline_call(Address entry, CodeBuffer *cbuf) {
+address MacroAssembler::trampoline_call(Address entry, CodeBuffer *cbuf) {
assert(entry.rspec().type() == relocInfo::runtime_call_type
|| entry.rspec().type() == relocInfo::opt_virtual_call_type
|| entry.rspec().type() == relocInfo::static_call_type
@@ -672,7 +672,10 @@
unsigned int start_offset = offset();
if (far_branches() && !Compile::current()->in_scratch_emit_size()) {
- emit_trampoline_stub(offset(), entry.target());
+ address stub = emit_trampoline_stub(start_offset, entry.target());
+ if (stub == NULL) {
+ return NULL; // CodeCache is full
+ }
}
if (cbuf) cbuf->set_insts_mark();
@@ -682,6 +685,7 @@
} else {
bl(pc());
}
+  return pc(); // just need to return a non-null address
}
@@ -696,13 +700,11 @@
// load the call target from the constant pool
// branch (LR still points to the call site above)
-void MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset,
+address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset,
address dest) {
address stub = start_a_stub(Compile::MAX_stubs_size/2);
if (stub == NULL) {
- start_a_stub(Compile::MAX_stubs_size/2);
- Compile::current()->env()->record_out_of_memory_failure();
- return;
+ return NULL; // CodeBuffer::expand failed
}
// Create a trampoline stub relocation which relates this trampoline stub
@@ -729,15 +731,16 @@
assert(is_NativeCallTrampolineStub_at(stub_start_addr), "doesn't look like a trampoline");
end_a_stub();
+ return stub;
}
-void MacroAssembler::ic_call(address entry) {
+address MacroAssembler::ic_call(address entry) {
RelocationHolder rh = virtual_call_Relocation::spec(pc());
// address const_ptr = long_constant((jlong)Universe::non_oop_word());
// unsigned long offset;
// ldr_constant(rscratch2, const_ptr);
movptr(rscratch2, (uintptr_t)Universe::non_oop_word());
- trampoline_call(Address(entry, rh));
+ return trampoline_call(Address(entry, rh));
}
// Implementation of call_VM versions
--- a/hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.hpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.hpp Wed Jul 29 08:05:21 2015 +0200
@@ -539,7 +539,7 @@
static int patch_oop(address insn_addr, address o);
- void emit_trampoline_stub(int insts_call_instruction_offset, address target);
+ address emit_trampoline_stub(int insts_call_instruction_offset, address target);
// The following 4 methods return the offset of the appropriate move instruction
@@ -942,7 +942,7 @@
// Calls
- void trampoline_call(Address entry, CodeBuffer *cbuf = NULL);
+ address trampoline_call(Address entry, CodeBuffer *cbuf = NULL);
static bool far_branches() {
return ReservedCodeCacheSize > branch_range;
@@ -962,7 +962,7 @@
}
// Emit the CompiledIC call idiom
- void ic_call(address entry);
+ address ic_call(address entry);
public:
--- a/hotspot/src/cpu/ppc/vm/compiledIC_ppc.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/ppc/vm/compiledIC_ppc.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -94,7 +94,7 @@
const int IC_pos_in_java_to_interp_stub = 8;
#define __ _masm.
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
+address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
#ifdef COMPILER2
// Get the mark within main instrs section which is set to the address of the call.
address call_addr = cbuf.insts_mark();
@@ -106,8 +106,7 @@
// Start the stub.
address stub = __ start_a_stub(CompiledStaticCall::to_interp_stub_size());
if (stub == NULL) {
- Compile::current()->env()->record_out_of_memory_failure();
- return;
+ return NULL; // CodeCache is full
}
// For java_to_interp stubs we use R11_scratch1 as scratch register
@@ -149,6 +148,7 @@
// End the stub.
__ end_a_stub();
+ return stub;
#else
ShouldNotReachHere();
#endif
--- a/hotspot/src/cpu/ppc/vm/ppc.ad Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/ppc/vm/ppc.ad Wed Jul 29 08:05:21 2015 +0200
@@ -1082,7 +1082,7 @@
// Start the stub.
address stub = __ start_a_stub(Compile::MAX_stubs_size/2);
if (stub == NULL) {
- Compile::current()->env()->record_out_of_memory_failure();
+ ciEnv::current()->record_failure("CodeCache is full");
return;
}
@@ -1160,7 +1160,7 @@
// Emit the trampoline stub which will be related to the branch-and-link below.
CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, offsets.insts_call_instruction_offset);
- if (Compile::current()->env()->failing()) { return offsets; } // Code cache may be full.
+ if (ciEnv::current()->failing()) { return offsets; } // Code cache may be full.
__ relocate(rtype);
}
@@ -3397,7 +3397,7 @@
// Emit the trampoline stub which will be related to the branch-and-link below.
CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
- if (Compile::current()->env()->failing()) { return; } // Code cache may be full.
+ if (ciEnv::current()->failing()) { return; } // Code cache may be full.
__ relocate(_optimized_virtual ?
relocInfo::opt_virtual_call_type : relocInfo::static_call_type);
}
@@ -3410,7 +3410,11 @@
__ bl(__ pc()); // Emits a relocation.
// The stub for call to interpreter.
- CompiledStaticCall::emit_to_interp_stub(cbuf);
+ address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
+ if (stub == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return;
+ }
}
%}
@@ -3455,7 +3459,11 @@
assert(_method, "execute next statement conditionally");
// The stub for call to interpreter.
- CompiledStaticCall::emit_to_interp_stub(cbuf);
+ address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
+ if (stub == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return;
+ }
// Restore original sp.
__ ld(R11_scratch1, 0, R1_SP); // Load caller sp.
--- a/hotspot/src/cpu/sparc/vm/c1_CodeStubs_sparc.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/sparc/vm/c1_CodeStubs_sparc.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -432,6 +432,9 @@
__ mov(length()->as_register(), O4);
ce->emit_static_call_stub();
+ if (ce->compilation()->bailed_out()) {
+ return; // CodeCache is full
+ }
__ call(SharedRuntime::get_resolve_static_call_stub(), relocInfo::static_call_type);
__ delayed()->nop();
--- a/hotspot/src/cpu/sparc/vm/compiledIC_sparc.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/sparc/vm/compiledIC_sparc.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -53,7 +53,7 @@
// ----------------------------------------------------------------------------
#define __ _masm.
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
+address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
#ifdef COMPILER2
// Stub is fixed up when the corresponding call is converted from calling
// compiled code to calling interpreted code.
@@ -64,9 +64,10 @@
MacroAssembler _masm(&cbuf);
- address base =
- __ start_a_stub(to_interp_stub_size()*2);
- if (base == NULL) return; // CodeBuffer::expand failed.
+ address base = __ start_a_stub(to_interp_stub_size());
+ if (base == NULL) {
+ return NULL; // CodeBuffer::expand failed.
+ }
// Static stub relocation stores the instruction address of the call.
__ relocate(static_stub_Relocation::spec(mark));
@@ -81,6 +82,7 @@
// Update current stubs pointer and restore code_end.
__ end_a_stub();
+ return base;
#else
ShouldNotReachHere();
#endif
--- a/hotspot/src/cpu/sparc/vm/sparc.ad Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/sparc/vm/sparc.ad Wed Jul 29 08:05:21 2015 +0200
@@ -1773,9 +1773,11 @@
AddressLiteral exception_blob(OptoRuntime::exception_blob()->entry_point());
MacroAssembler _masm(&cbuf);
- address base =
- __ start_a_stub(size_exception_handler());
- if (base == NULL) return 0; // CodeBuffer::expand failed
+ address base = __ start_a_stub(size_exception_handler());
+ if (base == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return 0; // CodeBuffer::expand failed
+ }
int offset = __ offset();
@@ -1796,9 +1798,11 @@
AddressLiteral deopt_blob(SharedRuntime::deopt_blob()->unpack());
MacroAssembler _masm(&cbuf);
- address base =
- __ start_a_stub(size_deopt_handler());
- if (base == NULL) return 0; // CodeBuffer::expand failed
+ address base = __ start_a_stub(size_deopt_handler());
+ if (base == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return 0; // CodeBuffer::expand failed
+ }
int offset = __ offset();
__ save_frame(0);
@@ -2599,7 +2603,12 @@
emit_call_reloc(cbuf, $meth$$method, relocInfo::static_call_type);
}
if (_method) { // Emit stub for static call.
- CompiledStaticCall::emit_to_interp_stub(cbuf);
+ address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
+ // Stub does not fit into scratch buffer if TraceJumps is enabled
+ if (stub == NULL && !(TraceJumps && Compile::current()->in_scratch_emit_size())) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return;
+ }
}
%}
--- a/hotspot/src/cpu/x86/vm/c1_CodeStubs_x86.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/x86/vm/c1_CodeStubs_x86.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -503,6 +503,9 @@
ce->align_call(lir_static_call);
ce->emit_static_call_stub();
+ if (ce->compilation()->bailed_out()) {
+ return; // CodeCache is full
+ }
AddressLiteral resolve(SharedRuntime::get_resolve_static_call_stub(),
relocInfo::static_call_type);
__ call(resolve);
--- a/hotspot/src/cpu/x86/vm/compiledIC_x86.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/x86/vm/compiledIC_x86.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -50,7 +50,7 @@
// ----------------------------------------------------------------------------
#define __ _masm.
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
+address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
// Stub is fixed up when the corresponding call is converted from
// calling compiled code to calling interpreted code.
// movq rbx, 0
@@ -62,9 +62,10 @@
// That's why we must use the macroassembler to generate a stub.
MacroAssembler _masm(&cbuf);
- address base =
- __ start_a_stub(to_interp_stub_size()*2);
- if (base == NULL) return; // CodeBuffer::expand failed.
+ address base = __ start_a_stub(to_interp_stub_size());
+ if (base == NULL) {
+ return NULL; // CodeBuffer::expand failed.
+ }
// Static stub relocation stores the instruction address of the call.
__ relocate(static_stub_Relocation::spec(mark), Assembler::imm_operand);
// Static stub relocation also tags the Method* in the code-stream.
@@ -74,6 +75,7 @@
// Update current stubs pointer and restore insts_end.
__ end_a_stub();
+ return base;
}
#undef __
--- a/hotspot/src/cpu/x86/vm/x86.ad Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/x86/vm/x86.ad Wed Jul 29 08:05:21 2015 +0200
@@ -1594,7 +1594,10 @@
// That's why we must use the macroassembler to generate a handler.
MacroAssembler _masm(&cbuf);
address base = __ start_a_stub(size_exception_handler());
- if (base == NULL) return 0; // CodeBuffer::expand failed
+ if (base == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return 0; // CodeBuffer::expand failed
+ }
int offset = __ offset();
__ jump(RuntimeAddress(OptoRuntime::exception_blob()->entry_point()));
assert(__ offset() - offset <= (int) size_exception_handler(), "overflow");
@@ -1609,7 +1612,10 @@
// That's why we must use the macroassembler to generate a handler.
MacroAssembler _masm(&cbuf);
address base = __ start_a_stub(size_deopt_handler());
- if (base == NULL) return 0; // CodeBuffer::expand failed
+ if (base == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return 0; // CodeBuffer::expand failed
+ }
int offset = __ offset();
#ifdef _LP64
--- a/hotspot/src/cpu/x86/vm/x86_32.ad Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/x86/vm/x86_32.ad Wed Jul 29 08:05:21 2015 +0200
@@ -1907,7 +1907,11 @@
static_call_Relocation::spec(), RELOC_IMM32 );
}
if (_method) { // Emit stub for static call.
- CompiledStaticCall::emit_to_interp_stub(cbuf);
+ address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
+ if (stub == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return;
+ }
}
%}
--- a/hotspot/src/cpu/x86/vm/x86_64.ad Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/x86/vm/x86_64.ad Wed Jul 29 08:05:21 2015 +0200
@@ -2137,7 +2137,11 @@
}
if (_method) {
// Emit stub for static call.
- CompiledStaticCall::emit_to_interp_stub(cbuf);
+ address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
+ if (stub == NULL) {
+ ciEnv::current()->record_failure("CodeCache is full");
+ return;
+ }
}
%}
--- a/hotspot/src/cpu/zero/vm/compiledIC_zero.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/cpu/zero/vm/compiledIC_zero.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -60,8 +60,9 @@
// ----------------------------------------------------------------------------
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
+address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
ShouldNotReachHere(); // Only needed for COMPILER2.
+ return NULL;
}
int CompiledStaticCall::to_interp_stub_size() {
--- a/hotspot/src/share/vm/c1/c1_LIRAssembler.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/share/vm/c1/c1_LIRAssembler.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -443,6 +443,7 @@
// emit the static call stub stuff out of line
emit_static_call_stub();
+ CHECK_BAILOUT();
switch (op->code()) {
case lir_static_call:
--- a/hotspot/src/share/vm/code/compiledIC.hpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/share/vm/code/compiledIC.hpp Wed Jul 29 08:05:21 2015 +0200
@@ -306,7 +306,7 @@
friend CompiledStaticCall* compiledStaticCall_at(Relocation* call_site);
// Code
- static void emit_to_interp_stub(CodeBuffer &cbuf);
+ static address emit_to_interp_stub(CodeBuffer &cbuf);
static int to_interp_stub_size();
static int reloc_to_interp_stub();
--- a/hotspot/src/share/vm/opto/compile.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/share/vm/opto/compile.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -594,6 +594,10 @@
n->as_MachBranch()->label_set(&fakeL, 0);
}
n->emit(buf, this->regalloc());
+
+ // Emitting into the scratch buffer should not fail
+ assert (!failing(), err_msg_res("Must not have pending failure. Reason is: %s", failure_reason()));
+
if (is_branch) // Restore label.
n->as_MachBranch()->label_set(saveL, save_bnum);
--- a/hotspot/src/share/vm/opto/output.cpp Tue Jul 28 19:20:42 2015 +0200
+++ b/hotspot/src/share/vm/opto/output.cpp Wed Jul 29 08:05:21 2015 +0200
@@ -1504,6 +1504,13 @@
n->emit(*cb, _regalloc);
current_offset = cb->insts_size();
+ // Above we only verified that there is enough space in the instruction section.
+ // However, the instruction may emit stubs that cause code buffer expansion.
+ // Bail out here if expansion failed due to a lack of code cache space.
+ if (failing()) {
+ return;
+ }
+
#ifdef ASSERT
if (n->size(_regalloc) < (current_offset-instr_offset)) {
n->dump();
@@ -1632,11 +1639,14 @@
if (_method) {
// Emit the exception handler code.
_code_offsets.set_value(CodeOffsets::Exceptions, HandlerImpl::emit_exception_handler(*cb));
+ if (failing()) {
+ return; // CodeBuffer::expand failed
+ }
// Emit the deopt handler code.
_code_offsets.set_value(CodeOffsets::Deopt, HandlerImpl::emit_deopt_handler(*cb));
// Emit the MethodHandle deopt handler code (if required).
- if (has_method_handle_invokes()) {
+ if (has_method_handle_invokes() && !failing()) {
// We can use the same code as for the normal deopt handler, we
// just need a different entry point address.
_code_offsets.set_value(CodeOffsets::DeoptMH, HandlerImpl::emit_deopt_handler(*cb));