# HG changeset patch # User vlivanov # Date 1449261979 -10800 # Node ID bbf32241d851adf2faca1bf820e2d270eb9dda93 # Parent 839c8ba29724c12aa0e1b4f868b6c361ff914a4e 8072008: Emit direct call instead of linkTo* for recursive indy/MH.invoke* calls Reviewed-by: jrose, dlong, aph, forax diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/aarch64/vm/aarch64.ad --- a/hotspot/src/cpu/aarch64/vm/aarch64.ad Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/aarch64/vm/aarch64.ad Fri Dec 04 23:46:19 2015 +0300 @@ -4667,17 +4667,12 @@ if (!_method) { // A call to a runtime wrapper, e.g. new, new_typeArray_Java, uncommon_trap. call = __ trampoline_call(Address(addr, relocInfo::runtime_call_type), &cbuf); - } else if (_optimized_virtual) { - call = __ trampoline_call(Address(addr, relocInfo::opt_virtual_call_type), &cbuf); } else { - call = __ trampoline_call(Address(addr, relocInfo::static_call_type), &cbuf); - } - if (call == NULL) { - ciEnv::current()->record_failure("CodeCache is full"); - return; - } - - if (_method) { + int method_index = resolved_method_index(cbuf); + RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index) + : static_call_Relocation::spec(method_index); + call = __ trampoline_call(Address(addr, rspec), &cbuf); + // Emit stub for static call address stub = CompiledStaticCall::emit_to_interp_stub(cbuf); if (stub == NULL) { @@ -4685,11 +4680,16 @@ return; } } + if (call == NULL) { + ciEnv::current()->record_failure("CodeCache is full"); + return; + } %} enc_class aarch64_enc_java_dynamic_call(method meth) %{ MacroAssembler _masm(&cbuf); - address call = __ ic_call((address)$meth$$method); + int method_index = resolved_method_index(cbuf); + address call = __ ic_call((address)$meth$$method, method_index); if (call == NULL) { ciEnv::current()->record_failure("CodeCache is full"); return; diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.cpp --- a/hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -732,8 +732,8 @@ return stub; } -address MacroAssembler::ic_call(address entry) { - RelocationHolder rh = virtual_call_Relocation::spec(pc()); +address MacroAssembler::ic_call(address entry, jint method_index) { + RelocationHolder rh = virtual_call_Relocation::spec(pc(), method_index); // address const_ptr = long_constant((jlong)Universe::non_oop_word()); // unsigned long offset; // ldr_constant(rscratch2, const_ptr); diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.hpp --- a/hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/aarch64/vm/macroAssembler_aarch64.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -983,7 +983,7 @@ } // Emit the CompiledIC call idiom - address ic_call(address entry); + address ic_call(address entry, jint method_index = 0); public: diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/ppc/vm/ppc.ad --- a/hotspot/src/cpu/ppc/vm/ppc.ad Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/ppc/vm/ppc.ad Fri Dec 04 23:46:19 2015 +0300 @@ -3396,11 +3396,13 @@ } const int entry_point_toc_offset = __ offset_to_method_toc(entry_point_toc_addr); + // Emit the trampoline stub which will be related to the branch-and-link below. CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset); if (ciEnv::current()->failing()) { return; } // Code cache may be full. 
- __ relocate(_optimized_virtual ? - relocInfo::opt_virtual_call_type : relocInfo::static_call_type); + int method_index = resolved_method_index(cbuf); + __ relocate(_optimized_virtual ? opt_virtual_call_Relocation::spec(method_index) + : static_call_Relocation::spec(method_index)); } // The real call. @@ -3450,8 +3452,8 @@ const address virtual_call_oop_addr = __ addr_at(virtual_call_oop_addr_offset); assert(MacroAssembler::is_load_const_from_method_toc_at(virtual_call_oop_addr), "should be load from TOC"); - - __ relocate(virtual_call_Relocation::spec(virtual_call_oop_addr)); + int method_index = resolved_method_index(cbuf); + __ relocate(virtual_call_Relocation::spec(virtual_call_oop_addr, method_index)); } // At this point I do not have the address of the trampoline stub, diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/sparc/vm/assembler_sparc.hpp --- a/hotspot/src/cpu/sparc/vm/assembler_sparc.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/sparc/vm/assembler_sparc.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -816,6 +816,8 @@ inline void call( address d, relocInfo::relocType rt = relocInfo::runtime_call_type ); inline void call( Label& L, relocInfo::relocType rt = relocInfo::runtime_call_type ); + inline void call( address d, RelocationHolder const& rspec ); + public: // pp 150 diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/sparc/vm/assembler_sparc.inline.hpp --- a/hotspot/src/cpu/sparc/vm/assembler_sparc.inline.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/sparc/vm/assembler_sparc.inline.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -76,6 +76,8 @@ inline void Assembler::call( address d, relocInfo::relocType rt ) { insert_nop_after_cbcond(); cti(); emit_data( op(call_op) | wdisp(intptr_t(d), intptr_t(pc()), 30), rt); has_delay_slot(); assert(rt != relocInfo::virtual_call_type, "must use virtual_call_Relocation::spec"); } inline void Assembler::call( Label& L, relocInfo::relocType rt ) { insert_nop_after_cbcond(); call( target(L), rt); } +inline void Assembler::call( address d, RelocationHolder const& rspec ) { insert_nop_after_cbcond(); cti(); emit_data( op(call_op) | wdisp(intptr_t(d), intptr_t(pc()), 30), rspec); has_delay_slot(); assert(rspec.type() != relocInfo::virtual_call_type, "must use virtual_call_Relocation::spec"); } + inline void Assembler::flush( Register s1, Register s2) { emit_int32( op(arith_op) | op3(flush_op3) | rs1(s1) | rs2(s2)); } inline void Assembler::flush( Register s1, int simm13a) { emit_data( op(arith_op) | op3(flush_op3) | rs1(s1) | immed(true) | simm(simm13a, 13)); } diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/sparc/vm/macroAssembler_sparc.cpp --- a/hotspot/src/cpu/sparc/vm/macroAssembler_sparc.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/sparc/vm/macroAssembler_sparc.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -770,8 +770,8 @@ } -void MacroAssembler::ic_call(address entry, bool emit_delay) { - RelocationHolder rspec = virtual_call_Relocation::spec(pc()); +void MacroAssembler::ic_call(address entry, bool emit_delay, jint method_index) { + RelocationHolder rspec = virtual_call_Relocation::spec(pc(), method_index); patchable_set((intptr_t)Universe::non_oop_word(), G5_inline_cache_reg); relocate(rspec); call(entry, relocInfo::none); @@ -780,7 +780,6 @@ } } - void MacroAssembler::card_table_write(jbyte* byte_map_base, Register tmp, Register obj) { #ifdef _LP64 diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/sparc/vm/macroAssembler_sparc.hpp --- a/hotspot/src/cpu/sparc/vm/macroAssembler_sparc.hpp Fri Dec 04 16:38:04 2015 +0100
+++ b/hotspot/src/cpu/sparc/vm/macroAssembler_sparc.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -729,7 +729,11 @@ // Check if the call target is out of wdisp30 range (relative to the code cache) static inline bool is_far_target(address d); inline void call( address d, relocInfo::relocType rt = relocInfo::runtime_call_type ); + inline void call( address d, RelocationHolder const& rspec); + inline void call( Label& L, relocInfo::relocType rt = relocInfo::runtime_call_type ); + inline void call( Label& L, RelocationHolder const& rspec); + inline void callr( Register s1, Register s2 ); inline void callr( Register s1, int simm13a, RelocationHolder const& rspec = RelocationHolder() ); @@ -1146,7 +1150,7 @@ void set_vm_result(Register oop_result); // Emit the CompiledIC call idiom - void ic_call(address entry, bool emit_delay = true); + void ic_call(address entry, bool emit_delay = true, jint method_index = 0); // if call_VM_base was called with check_exceptions=false, then call // check_and_forward_exception to handle exceptions when it is safe diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/sparc/vm/macroAssembler_sparc.inline.hpp --- a/hotspot/src/cpu/sparc/vm/macroAssembler_sparc.inline.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/sparc/vm/macroAssembler_sparc.inline.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -298,6 +298,10 @@ // expense of relocation and if we overflow the displacement // of the quick call instruction. inline void MacroAssembler::call( address d, relocInfo::relocType rt ) { + MacroAssembler::call(d, Relocation::spec_simple(rt)); +} + +inline void MacroAssembler::call( address d, RelocationHolder const& rspec ) { #ifdef _LP64 intptr_t disp; // NULL is ok because it will be relocated later. @@ -309,14 +313,14 @@ // Is this address within range of the call instruction? 
// If not, use the expensive instruction sequence if (is_far_target(d)) { - relocate(rt); + relocate(rspec); AddressLiteral dest(d); jumpl_to(dest, O7, O7); } else { - Assembler::call(d, rt); + Assembler::call(d, rspec); } #else - Assembler::call( d, rt ); + Assembler::call( d, rspec ); #endif } diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/sparc/vm/nativeInst_sparc.cpp --- a/hotspot/src/cpu/sparc/vm/nativeInst_sparc.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/sparc/vm/nativeInst_sparc.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -131,8 +131,9 @@ void NativeCall::verify() { NativeInstruction::verify(); // make sure code pattern is actually a call instruction - if (!is_op(long_at(0), Assembler::call_op)) { - fatal("not a call"); + int x = long_at(0); + if (!is_op(x, Assembler::call_op)) { + fatal("not a call: 0x%x @ " INTPTR_FORMAT, x, p2i(instruction_address())); } } diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/sparc/vm/sparc.ad --- a/hotspot/src/cpu/sparc/vm/sparc.ad Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/sparc/vm/sparc.ad Fri Dec 04 23:46:19 2015 +0300 @@ -1001,7 +1001,7 @@ #endif } -void emit_call_reloc(CodeBuffer &cbuf, intptr_t entry_point, relocInfo::relocType rtype, bool preserve_g2 = false) { +void emit_call_reloc(CodeBuffer &cbuf, intptr_t entry_point, RelocationHolder const& rspec, bool preserve_g2 = false) { // The method which records debug information at every safepoint // expects the call to be the first instruction in the snippet as // it creates a PcDesc structure which tracks the offset of a call @@ -1023,7 +1023,7 @@ int startpos = __ offset(); #endif /* ASSERT */ - __ call((address)entry_point, rtype); + __ call((address)entry_point, rspec); if (preserve_g2) __ delayed()->mov(G2, L7); else __ delayed()->nop(); @@ -2593,8 +2593,7 @@ enc_class Java_To_Runtime (method meth) %{ // CALL Java_To_Runtime // CALL directly to the runtime // The user of this is responsible for ensuring that R_L7 is empty (killed). - emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type, - /*preserve_g2=*/true); + emit_call_reloc(cbuf, $meth$$method, runtime_call_Relocation::spec(), /*preserve_g2=*/true); %} enc_class preserve_SP %{ @@ -2611,13 +2610,14 @@ // CALL to fixup routine. Fixup routine uses ScopeDesc info to determine // who we intended to call. if (!_method) { - emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type); - } else if (_optimized_virtual) { - emit_call_reloc(cbuf, $meth$$method, relocInfo::opt_virtual_call_type); + emit_call_reloc(cbuf, $meth$$method, runtime_call_Relocation::spec()); } else { - emit_call_reloc(cbuf, $meth$$method, relocInfo::static_call_type); - } - if (_method) { // Emit stub for static call. + int method_index = resolved_method_index(cbuf); + RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index) + : static_call_Relocation::spec(method_index); + emit_call_reloc(cbuf, $meth$$method, rspec); + + // Emit stub for static call. 
address stub = CompiledStaticCall::emit_to_interp_stub(cbuf); // Stub does not fit into scratch buffer if TraceJumps is enabled if (stub == NULL && !(TraceJumps && Compile::current()->in_scratch_emit_size())) { @@ -2638,7 +2638,7 @@ Register G5_ic_reg = reg_to_register_object(Matcher::inline_cache_reg_encode()); assert(G5_ic_reg == G5_inline_cache_reg, "G5_inline_cache_reg used in assemble_ic_buffer_code()"); assert(G5_ic_reg == G5_megamorphic_method, "G5_megamorphic_method used in megamorphic call stub"); - __ ic_call((address)$meth$$method); + __ ic_call((address)$meth$$method, /*emit_delay=*/true, resolved_method_index(cbuf)); } else { assert(!UseInlineCaches, "expect vtable calls only if not using ICs"); // Just go thru the vtable @@ -10069,10 +10069,10 @@ format %{ "String Compare byte[] $str1,$cnt1,$str2,$cnt2 -> $result // KILL $tmp" %} ins_encode %{ __ string_compare($str1$$Register, $str2$$Register, - $cnt1$$Register, $cnt2$$Register, + $cnt1$$Register, $cnt2$$Register, $tmp$$Register, $tmp$$Register, $result$$Register, StrIntrinsicNode::LL); - %} + %} ins_pipe(long_memory_op); %} @@ -10088,7 +10088,7 @@ $cnt1$$Register, $cnt2$$Register, $tmp$$Register, $tmp$$Register, $result$$Register, StrIntrinsicNode::UU); - %} + %} ins_pipe(long_memory_op); %} @@ -10104,7 +10104,7 @@ $cnt1$$Register, $cnt2$$Register, $tmp1$$Register, $tmp2$$Register, $result$$Register, StrIntrinsicNode::LU); - %} + %} ins_pipe(long_memory_op); %} @@ -10117,10 +10117,10 @@ format %{ "String Compare byte[] $str1,$cnt1,$str2,$cnt2 -> $result // KILL $tmp1,$tmp2" %} ins_encode %{ __ string_compare($str2$$Register, $str1$$Register, - $cnt2$$Register, $cnt1$$Register, + $cnt2$$Register, $cnt1$$Register, $tmp1$$Register, $tmp2$$Register, $result$$Register, StrIntrinsicNode::UL); - %} + %} ins_pipe(long_memory_op); %} diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/x86/vm/macroAssembler_x86.cpp --- a/hotspot/src/cpu/x86/vm/macroAssembler_x86.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/x86/vm/macroAssembler_x86.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -2260,8 +2260,8 @@ } } -void MacroAssembler::ic_call(address entry) { - RelocationHolder rh = virtual_call_Relocation::spec(pc()); +void MacroAssembler::ic_call(address entry, jint method_index) { + RelocationHolder rh = virtual_call_Relocation::spec(pc(), method_index); movptr(rax, (intptr_t)Universe::non_oop_word()); call(AddressLiteral(entry, rh)); } diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/x86/vm/macroAssembler_x86.hpp --- a/hotspot/src/cpu/x86/vm/macroAssembler_x86.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/x86/vm/macroAssembler_x86.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -850,7 +850,7 @@ void call(AddressLiteral entry); // Emit the CompiledIC call idiom - void ic_call(address entry); + void ic_call(address entry, jint method_index = 0); // Jumps diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/x86/vm/x86_32.ad --- a/hotspot/src/cpu/x86/vm/x86_32.ad Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/x86/vm/x86_32.ad Fri Dec 04 23:46:19 2015 +0300 @@ -1898,17 +1898,18 @@ // who we intended to call. 
cbuf.set_insts_mark(); $$$emit8$primary; + if (!_method) { emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4), - runtime_call_Relocation::spec(), RELOC_IMM32 ); - } else if (_optimized_virtual) { - emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4), - opt_virtual_call_Relocation::spec(), RELOC_IMM32 ); + runtime_call_Relocation::spec(), + RELOC_IMM32); } else { + int method_index = resolved_method_index(cbuf); + RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index) + : static_call_Relocation::spec(method_index); emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4), - static_call_Relocation::spec(), RELOC_IMM32 ); - } - if (_method) { // Emit stub for static call. + rspec, RELOC_DISP32); + // Emit stubs for static call. address stub = CompiledStaticCall::emit_to_interp_stub(cbuf); if (stub == NULL) { ciEnv::current()->record_failure("CodeCache is full"); @@ -1919,7 +1920,7 @@ enc_class Java_Dynamic_Call (method meth) %{ // JAVA DYNAMIC CALL MacroAssembler _masm(&cbuf); - __ ic_call((address)$meth$$method); + __ ic_call((address)$meth$$method, resolved_method_index(cbuf)); %} enc_class Java_Compiled_Call (method meth) %{ // JAVA COMPILED CALL @@ -11504,7 +11505,7 @@ __ arrays_equals(false, $str1$$Register, $str2$$Register, $cnt$$Register, $result$$Register, $tmp3$$Register, $tmp1$$XMMRegister, $tmp2$$XMMRegister, false /* char */); - %} + %} ins_pipe( pipe_slow ); %} diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/cpu/x86/vm/x86_64.ad --- a/hotspot/src/cpu/x86/vm/x86_64.ad Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/cpu/x86/vm/x86_64.ad Fri Dec 04 23:46:19 2015 +0300 @@ -2120,22 +2120,15 @@ $$$emit8$primary; if (!_method) { - emit_d32_reloc(cbuf, - (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4), + emit_d32_reloc(cbuf, (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4), runtime_call_Relocation::spec(), RELOC_DISP32); - } else if (_optimized_virtual) { - emit_d32_reloc(cbuf, - (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4), - opt_virtual_call_Relocation::spec(), - RELOC_DISP32); } else { - emit_d32_reloc(cbuf, - (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4), - static_call_Relocation::spec(), - RELOC_DISP32); - } - if (_method) { + int method_index = resolved_method_index(cbuf); + RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index) + : static_call_Relocation::spec(method_index); + emit_d32_reloc(cbuf, (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4), + rspec, RELOC_DISP32); // Emit stubs for static call. 
address mark = cbuf.insts_mark(); address stub = CompiledStaticCall::emit_to_interp_stub(cbuf, mark); @@ -2148,7 +2141,7 @@ enc_class Java_Dynamic_Call(method meth) %{ MacroAssembler _masm(&cbuf); - __ ic_call((address)$meth$$method); + __ ic_call((address)$meth$$method, resolved_method_index(cbuf)); %} enc_class Java_Compiled_Call(method meth) diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/asm/codeBuffer.cpp --- a/hotspot/src/share/vm/asm/codeBuffer.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/asm/codeBuffer.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -305,6 +305,31 @@ } } +void CodeSection::relocate(address at, relocInfo::relocType rtype, int format, jint method_index) { + RelocationHolder rh; + switch (rtype) { + case relocInfo::none: return; + case relocInfo::opt_virtual_call_type: { + rh = opt_virtual_call_Relocation::spec(method_index); + break; + } + case relocInfo::static_call_type: { + rh = static_call_Relocation::spec(method_index); + break; + } + case relocInfo::virtual_call_type: { + assert(method_index == 0, "resolved method overriding is not supported"); + rh = Relocation::spec_simple(rtype); + break; + } + default: { + rh = Relocation::spec_simple(rtype); + break; + } + } + relocate(at, rh, format); +} + void CodeSection::relocate(address at, RelocationHolder const& spec, int format) { Relocation* reloc = spec.reloc(); relocInfo::relocType rtype = (relocInfo::relocType) reloc->type(); diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/asm/codeBuffer.hpp --- a/hotspot/src/share/vm/asm/codeBuffer.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/asm/codeBuffer.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -209,10 +209,7 @@ // Emit a relocation. void relocate(address at, RelocationHolder const& rspec, int format = 0); - void relocate(address at, relocInfo::relocType rtype, int format = 0) { - if (rtype != relocInfo::none) - relocate(at, Relocation::spec_simple(rtype), format); - } + void relocate(address at, relocInfo::relocType rtype, int format = 0, jint method_index = 0); // alignment requirement for starting offset // Requirements are that the instruction area and the diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/ci/ciMethod.hpp --- a/hotspot/src/share/vm/ci/ciMethod.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/ci/ciMethod.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -250,6 +250,12 @@ ciField* get_field_at_bci( int bci, bool &will_link); ciMethod* get_method_at_bci(int bci, bool &will_link, ciSignature* *declared_signature); + ciMethod* get_method_at_bci(int bci) { + bool ignored_will_link; + ciSignature* ignored_declared_signature; + return get_method_at_bci(bci, ignored_will_link, &ignored_declared_signature); + } + // Given a certain calling environment, find the monomorphic target // for the call. Return NULL if the call is not monomorphic in // its calling environment. 
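The hunks above add the plumbing on the recording side: CodeSection::relocate() can now carry a method_index, and opt_virtual_call/static_call relocations store it, so a call site in compiled code points at an entry in the nmethod's metadata table holding the statically resolved Method*. Later hunks (nmethod::attached_method, SharedRuntime::extract_attached_method) read that entry back during call resolution instead of re-parsing the symbolic info from the bytecode. As a rough illustration of that record/lookup round trip, here is a minimal standalone C++ sketch; it is not HotSpot code, and MiniMethod, Reloc and MiniCode are hypothetical stand-ins for Method*, the call-site relocations and nmethod. Only the reserved index 0 ("nothing attached") and the lookup shape mirror the patch.

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

struct MiniMethod { std::string name; };

struct Reloc {
    uintptr_t call_pc;      // address of the call instruction
    int       method_index; // 1-biased index into the metadata table, 0 = nothing attached
};

struct MiniCode {
    std::vector<MiniMethod*> metadata; // stand-in for the nmethod metadata section
    std::vector<Reloc>       relocs;   // stand-in for call-site relocations

    // Mirrors the 1-biased indexing convention: index 0 is reserved and means "none".
    MiniMethod* metadata_at(int index) const {
        return index == 0 ? nullptr : metadata[index - 1];
    }

    // Rough analogue of nmethod::attached_method(call_pc): find the relocation that
    // covers the call instruction and return the recorded method, or null if none.
    MiniMethod* attached_method(uintptr_t call_pc) const {
        for (const Reloc& r : relocs) {
            if (r.call_pc == call_pc) {
                return metadata_at(r.method_index);
            }
        }
        return nullptr; // caller falls back to the symbolic info from bytecode
    }
};

int main() {
    MiniMethod target{"resolved_target"};
    MiniCode code;
    code.metadata.push_back(&target);   // record the statically resolved callee
    code.relocs.push_back({0x1000, 1}); // MH.linkTo* call turned into a direct call
    code.relocs.push_back({0x2000, 0}); // ordinary call site, nothing attached

    MiniMethod* m = code.attached_method(0x1000);
    std::cout << (m != nullptr ? m->name : std::string("<use bytecode info>")) << "\n";
    m = code.attached_method(0x2000);
    std::cout << (m != nullptr ? m->name : std::string("<use bytecode info>")) << "\n";
    return 0;
}

Keeping index 0 reserved means every call site that records no method index behaves exactly as before: resolution falls back to the bytecode's constant-pool entry.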
diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/classfile/vmSymbols.hpp --- a/hotspot/src/share/vm/classfile/vmSymbols.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/classfile/vmSymbols.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -1054,6 +1054,11 @@ do_name( isCompileConstant_name, "isCompileConstant") \ do_alias( isCompileConstant_signature, object_boolean_signature) \ \ + do_class(sun_hotspot_WhiteBox, "sun/hotspot/WhiteBox") \ + do_intrinsic(_deoptimize, sun_hotspot_WhiteBox, deoptimize_name, deoptimize_signature, F_R) \ + do_name( deoptimize_name, "deoptimize") \ + do_alias( deoptimize_signature, void_method_signature) \ + \ /* unsafe memory references (there are a lot of them...) */ \ do_signature(getObject_signature, "(Ljava/lang/Object;J)Ljava/lang/Object;") \ do_signature(putObject_signature, "(Ljava/lang/Object;JLjava/lang/Object;)V") \ diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/code/compiledIC.cpp --- a/hotspot/src/share/vm/code/compiledIC.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/code/compiledIC.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -434,7 +434,7 @@ InlineCacheBuffer::create_transition_stub(this, info.cached_metadata(), info.entry()); } else { if (is_optimized()) { - set_ic_destination(info.entry()); + set_ic_destination(info.entry()); } else { set_ic_destination_and_value(info.entry(), info.cached_metadata()); } diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/code/nmethod.cpp --- a/hotspot/src/share/vm/code/nmethod.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/code/nmethod.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -978,19 +978,23 @@ oop_maps()->print(); } } - if (PrintDebugInfo || CompilerOracle::has_option_string(_method, "PrintDebugInfo")) { + if (printmethod || PrintDebugInfo || CompilerOracle::has_option_string(_method, "PrintDebugInfo")) { print_scopes(); } - if (PrintRelocations || CompilerOracle::has_option_string(_method, "PrintRelocations")) { + if (printmethod || PrintRelocations || CompilerOracle::has_option_string(_method, "PrintRelocations")) { print_relocations(); } - if (PrintDependencies || CompilerOracle::has_option_string(_method, "PrintDependencies")) { + if (printmethod || PrintDependencies || CompilerOracle::has_option_string(_method, "PrintDependencies")) { print_dependencies(); } - if (PrintExceptionHandlers) { + if (printmethod || PrintExceptionHandlers) { print_handler_table(); print_nul_chk_table(); } + if (printmethod) { + print_recorded_oops(); + print_recorded_metadata(); + } if (xtty != NULL) { xtty->tail("print_nmethod"); } @@ -3013,6 +3017,26 @@ } } +void nmethod::print_recorded_oops() { + tty->print_cr("Recorded oops:"); + for (int i = 0; i < oops_count(); i++) { + oop o = oop_at(i); + tty->print("#%3d: " INTPTR_FORMAT " ", i, p2i(o)); + o->print_value(); + tty->cr(); + } +} + +void nmethod::print_recorded_metadata() { + tty->print_cr("Recorded metadata:"); + for (int i = 0; i < metadata_count(); i++) { + Metadata* m = metadata_at(i); + tty->print("#%3d: " INTPTR_FORMAT " ", i, p2i(m)); + m->print_value_on_maybe_null(tty); + tty->cr(); + } +} + #endif // PRODUCT const char* nmethod::reloc_string_for(u_char* begin, u_char* end) { @@ -3053,9 +3077,39 @@ } return st.as_string(); } - case relocInfo::virtual_call_type: return "virtual_call"; - case relocInfo::opt_virtual_call_type: return "optimized virtual_call"; - case relocInfo::static_call_type: return "static_call"; + case relocInfo::virtual_call_type: { + stringStream st; + st.print_raw("virtual_call"); + 
virtual_call_Relocation* r = iter.virtual_call_reloc(); + Method* m = r->method_value(); + if (m != NULL) { + assert(m->is_method(), ""); + m->print_short_name(&st); + } + return st.as_string(); + } + case relocInfo::opt_virtual_call_type: { + stringStream st; + st.print_raw("optimized virtual_call"); + opt_virtual_call_Relocation* r = iter.opt_virtual_call_reloc(); + Method* m = r->method_value(); + if (m != NULL) { + assert(m->is_method(), ""); + m->print_short_name(&st); + } + return st.as_string(); + } + case relocInfo::static_call_type: { + stringStream st; + st.print_raw("static_call"); + static_call_Relocation* r = iter.static_call_reloc(); + Method* m = r->method_value(); + if (m != NULL) { + assert(m->is_method(), ""); + m->print_short_name(&st); + } + return st.as_string(); + } case relocInfo::static_stub_type: return "static_stub"; case relocInfo::external_word_type: return "external_word"; case relocInfo::internal_word_type: return "internal_word"; @@ -3393,3 +3447,19 @@ return buf; } #endif + +Method* nmethod::attached_method(address call_instr) { + assert(code_contains(call_instr), "not part of the nmethod"); + RelocIterator iter(this, call_instr, call_instr + 1); + while (iter.next()) { + if (iter.addr() == call_instr) { + switch(iter.type()) { + case relocInfo::static_call_type: return iter.static_call_reloc()->method_value(); + case relocInfo::opt_virtual_call_type: return iter.opt_virtual_call_reloc()->method_value(); + case relocInfo::virtual_call_type: return iter.virtual_call_reloc()->method_value(); + } + } + } + return NULL; // not found +} + diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/code/nmethod.hpp --- a/hotspot/src/share/vm/code/nmethod.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/code/nmethod.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -392,6 +392,9 @@ int handler_table_size() const { return handler_table_end() - handler_table_begin(); } int nul_chk_table_size() const { return nul_chk_table_end() - nul_chk_table_begin(); } + int oops_count() const { assert(oops_size() % oopSize == 0, ""); return (oops_size() / oopSize) + 1; } + int metadata_count() const { assert(metadata_size() % wordSize == 0, ""); return (metadata_size() / wordSize) + 1; } + int total_size () const; void dec_hotness_counter() { _hotness_counter--; } @@ -491,7 +494,7 @@ oop oop_at(int index) const { return index == 0 ? (oop) NULL: *oop_addr_at(index); } oop* oop_addr_at(int index) const { // for GC // relocation indexes are biased by 1 (because 0 is reserved) - assert(index > 0 && index <= oops_size(), "must be a valid non-zero index"); + assert(index > 0 && index <= oops_count(), "must be a valid non-zero index"); assert(!_oops_are_stale, "oops are stale"); return &oops_begin()[index - 1]; } @@ -501,13 +504,15 @@ Metadata* metadata_at(int index) const { return index == 0 ? 
NULL: *metadata_addr_at(index); } Metadata** metadata_addr_at(int index) const { // for GC // relocation indexes are biased by 1 (because 0 is reserved) - assert(index > 0 && index <= metadata_size(), "must be a valid non-zero index"); + assert(index > 0 && index <= metadata_count(), "must be a valid non-zero index"); return &metadata_begin()[index - 1]; } void copy_values(GrowableArray<jobject>* oops); void copy_values(GrowableArray<Metadata*>* metadata); + Method* attached_method(address call_pc); + // Relocation support private: void fix_oop_relocations(address begin, address end, bool initialize_immediates); @@ -696,6 +701,8 @@ void print_calls(outputStream* st) PRODUCT_RETURN; void print_handler_table() PRODUCT_RETURN; void print_nul_chk_table() PRODUCT_RETURN; + void print_recorded_oops() PRODUCT_RETURN; + void print_recorded_metadata() PRODUCT_RETURN; void print_nmethod(bool print_code); // need to re-define this from CodeBlob else the overload hides it diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/code/relocInfo.cpp --- a/hotspot/src/share/vm/code/relocInfo.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/code/relocInfo.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -581,13 +581,14 @@ normalize_address(_cached_value, dest); jint x0 = scaled_offset_null_special(_cached_value, point); - p = pack_1_int_to(p, x0); + p = pack_2_ints_to(p, x0, _method_index); dest->set_locs_end((relocInfo*) p); } void virtual_call_Relocation::unpack_data() { - jint x0 = unpack_1_int(); + jint x0 = 0; + unpack_2_ints(x0, _method_index); address point = addr(); _cached_value = x0==0? NULL: address_from_scaled_offset(x0, point); } @@ -793,6 +794,12 @@ return _cached_value; } +Method* virtual_call_Relocation::method_value() { + Metadata* m = code()->metadata_at(_method_index); + assert(m != NULL || _method_index == 0, "should be non-null for non-zero index"); + assert(m == NULL || m->is_method(), "not a method"); + return (Method*)m; +} void virtual_call_Relocation::clear_inline_cache() { // No stubs for ICs @@ -803,6 +810,23 @@ } +void opt_virtual_call_Relocation::pack_data_to(CodeSection* dest) { + short* p = (short*) dest->locs_end(); + p = pack_1_int_to(p, _method_index); + dest->set_locs_end((relocInfo*) p); +} + +void opt_virtual_call_Relocation::unpack_data() { + _method_index = unpack_1_int(); +} + +Method* opt_virtual_call_Relocation::method_value() { + Metadata* m = code()->metadata_at(_method_index); + assert(m != NULL || _method_index == 0, "should be non-null for non-zero index"); + assert(m == NULL || m->is_method(), "not a method"); + return (Method*)m; +} + void opt_virtual_call_Relocation::clear_inline_cache() { // No stubs for ICs // Clean IC @@ -827,6 +851,22 @@ return NULL; } +Method* static_call_Relocation::method_value() { + Metadata* m = code()->metadata_at(_method_index); + assert(m != NULL || _method_index == 0, "should be non-null for non-zero index"); + assert(m == NULL || m->is_method(), "not a method"); + return (Method*)m; +} + +void static_call_Relocation::pack_data_to(CodeSection* dest) { + short* p = (short*) dest->locs_end(); + p = pack_1_int_to(p, _method_index); + dest->set_locs_end((relocInfo*) p); +} + +void static_call_Relocation::unpack_data() { + _method_index = unpack_1_int(); +} void static_call_Relocation::clear_inline_cache() { // Safe call site info @@ -1014,6 +1054,12 @@ break; } case relocInfo::static_call_type: + { + static_call_Relocation* r = (static_call_Relocation*) reloc(); + tty->print(" | [destination=" INTPTR_FORMAT " metadata=" INTPTR_FORMAT "]", +
p2i(r->destination()), p2i(r->method_value())); + break; + } case relocInfo::runtime_call_type: { CallRelocation* r = (CallRelocation*) reloc(); @@ -1023,8 +1069,8 @@ case relocInfo::virtual_call_type: { virtual_call_Relocation* r = (virtual_call_Relocation*) reloc(); - tty->print(" | [destination=" INTPTR_FORMAT " cached_value=" INTPTR_FORMAT "]", - p2i(r->destination()), p2i(r->cached_value())); + tty->print(" | [destination=" INTPTR_FORMAT " cached_value=" INTPTR_FORMAT " metadata=" INTPTR_FORMAT "]", + p2i(r->destination()), p2i(r->cached_value()), p2i(r->method_value())); break; } case relocInfo::static_stub_type: @@ -1039,6 +1085,13 @@ tty->print(" | [trampoline owner=" INTPTR_FORMAT "]", p2i(r->owner())); break; } + case relocInfo::opt_virtual_call_type: + { + opt_virtual_call_Relocation* r = (opt_virtual_call_Relocation*) reloc(); + tty->print(" | [destination=" INTPTR_FORMAT " metadata=" INTPTR_FORMAT "]", + p2i(r->destination()), p2i(r->method_value())); + break; + } } tty->cr(); } diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/code/relocInfo.hpp --- a/hotspot/src/share/vm/code/relocInfo.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/code/relocInfo.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -1044,27 +1044,31 @@ // "cached_value" points to the first associated set-oop. // The oop_limit helps find the last associated set-oop. // (See comments at the top of this file.) - static RelocationHolder spec(address cached_value) { + static RelocationHolder spec(address cached_value, jint method_index = 0) { RelocationHolder rh = newHolder(); - new(rh) virtual_call_Relocation(cached_value); + new(rh) virtual_call_Relocation(cached_value, method_index); return rh; } - virtual_call_Relocation(address cached_value) { + private: + address _cached_value; // location of set-value instruction + jint _method_index; // resolved method for a Java call + + virtual_call_Relocation(address cached_value, int method_index) { _cached_value = cached_value; + _method_index = method_index; assert(cached_value != NULL, "first oop address must be specified"); } - private: - address _cached_value; // location of set-value instruction - friend class RelocIterator; virtual_call_Relocation() { } - public: address cached_value(); + int method_index() { return _method_index; } + Method* method_value(); + // data is packed as scaled offsets in "2_ints" format: [f l] or [Ff Ll] // oop_limit is set to 0 if the limit falls somewhere within the call. // When unpacking, a zero oop_limit is taken to refer to the end of the call. 
@@ -1080,17 +1084,29 @@ relocInfo::relocType type() { return relocInfo::opt_virtual_call_type; } public: - static RelocationHolder spec() { + static RelocationHolder spec(int method_index = 0) { RelocationHolder rh = newHolder(); - new(rh) opt_virtual_call_Relocation(); + new(rh) opt_virtual_call_Relocation(method_index); return rh; } private: + jint _method_index; // resolved method for a Java call + + opt_virtual_call_Relocation(int method_index) { + _method_index = method_index; + } + friend class RelocIterator; - opt_virtual_call_Relocation() { } + opt_virtual_call_Relocation() {} public: + int method_index() { return _method_index; } + Method* method_value(); + + void pack_data_to(CodeSection* dest); + void unpack_data(); + void clear_inline_cache(); // find the matching static_stub @@ -1102,17 +1118,29 @@ relocInfo::relocType type() { return relocInfo::static_call_type; } public: - static RelocationHolder spec() { + static RelocationHolder spec(int method_index = 0) { RelocationHolder rh = newHolder(); - new(rh) static_call_Relocation(); + new(rh) static_call_Relocation(method_index); return rh; } private: + jint _method_index; // resolved method for a Java call + + static_call_Relocation(int method_index) { + _method_index = method_index; + } + friend class RelocIterator; - static_call_Relocation() { } + static_call_Relocation() {} public: + int method_index() { return _method_index; } + Method* method_value(); + + void pack_data_to(CodeSection* dest); + void unpack_data(); + void clear_inline_cache(); // find the matching static_stub diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/interpreter/linkResolver.cpp --- a/hotspot/src/share/vm/interpreter/linkResolver.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/interpreter/linkResolver.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -1456,6 +1456,33 @@ return; } +void LinkResolver::resolve_invoke(CallInfo& result, Handle& recv, + const methodHandle& attached_method, + Bytecodes::Code byte, TRAPS) { + KlassHandle defc = attached_method->method_holder(); + Symbol* name = attached_method->name(); + Symbol* type = attached_method->signature(); + LinkInfo link_info(defc, name, type, KlassHandle(), /*check_access=*/false); + switch(byte) { + case Bytecodes::_invokevirtual: + resolve_virtual_call(result, recv, recv->klass(), link_info, + /*check_null_and_abstract=*/true, CHECK); + break; + case Bytecodes::_invokeinterface: + resolve_interface_call(result, recv, recv->klass(), link_info, + /*check_null_and_abstract=*/true, CHECK); + break; + case Bytecodes::_invokestatic: + resolve_static_call(result, link_info, /*initialize_class=*/false, CHECK); + break; + case Bytecodes::_invokespecial: + resolve_special_call(result, link_info, CHECK); + break; + default: + fatal("bad call: %s", Bytecodes::name(byte)); + } +} + void LinkResolver::resolve_invokestatic(CallInfo& result, const constantPoolHandle& pool, int index, TRAPS) { LinkInfo link_info(pool, index, CHECK); resolve_static_call(result, link_info, /*initialize_class*/true, CHECK); diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/interpreter/linkResolver.hpp --- a/hotspot/src/share/vm/interpreter/linkResolver.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/interpreter/linkResolver.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -295,6 +295,12 @@ static void resolve_invoke(CallInfo& result, Handle recv, const constantPoolHandle& pool, int index, Bytecodes::Code byte, TRAPS); + + // runtime resolving from attached method + static void resolve_invoke(CallInfo& result, 
Handle& recv, + const methodHandle& attached_method, + Bytecodes::Code byte, TRAPS); + private: static void trace_method_resolution(const char* prefix, KlassHandle klass, KlassHandle resolved_klass, diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/opto/callGenerator.cpp --- a/hotspot/src/share/vm/opto/callGenerator.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/opto/callGenerator.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -46,6 +46,11 @@ return TypeFunc::make(method()); } +bool CallGenerator::is_inlined_mh_linker(JVMState* jvms, ciMethod* callee) { + ciMethod* symbolic_info = jvms->method()->get_method_at_bci(jvms->bci()); + return symbolic_info->is_method_handle_intrinsic() && !callee->is_method_handle_intrinsic(); +} + //-----------------------------ParseGenerator--------------------------------- // Internal class which handles all direct bytecode traversal. class ParseGenerator : public InlineCallGenerator { @@ -137,6 +142,13 @@ } CallStaticJavaNode *call = new CallStaticJavaNode(kit.C, tf(), target, method(), kit.bci()); + if (is_inlined_mh_linker(jvms, method())) { + // To be able to issue a direct call and skip a call to MH.linkTo*/invokeBasic adapter, + // additional information about the method being invoked should be attached + // to the call site to make resolution logic work + // (see SharedRuntime::resolve_static_call_C). + call->set_override_symbolic_info(true); + } _call_node = call; // Save the call node in case we need it later if (!is_static) { // Make an explicit receiver null_check as part of this call. @@ -192,7 +204,10 @@ // the call instruction will have a seemingly deficient out-count. // (The bailout says something misleading about an "infinite loop".) if (kit.gvn().type(receiver)->higher_equal(TypePtr::NULL_PTR)) { - kit.inc_sp(method()->arg_size()); // restore arguments + assert(Bytecodes::is_invoke(kit.java_bc()), "%d: %s", kit.java_bc(), Bytecodes::name(kit.java_bc())); + ciMethod* declared_method = kit.method()->get_method_at_bci(kit.bci()); + int arg_size = declared_method->signature()->arg_size_for_bc(kit.java_bc()); + kit.inc_sp(arg_size); // restore arguments kit.uncommon_trap(Deoptimization::Reason_null_check, Deoptimization::Action_none, NULL, "null receiver"); @@ -226,6 +241,13 @@ address target = SharedRuntime::get_resolve_virtual_call_stub(); // Normal inline cache used for call CallDynamicJavaNode *call = new CallDynamicJavaNode(tf(), target, method(), _vtable_index, kit.bci()); + if (is_inlined_mh_linker(jvms, method())) { + // To be able to issue a direct call (optimized virtual or virtual) + // and skip a call to MH.linkTo*/invokeBasic adapter, additional information + // about the method being invoked should be attached to the call site to + // make resolution logic work (see SharedRuntime::resolve_{virtual,opt_virtual}_call_C). 
+ call->set_override_symbolic_info(true); + } kit.set_arguments_for_java_call(call); kit.set_edges_for_java_call(call); Node* ret = kit.set_results_for_java_call(call); @@ -463,8 +485,8 @@ _attempt++; } - if (cg != NULL) { - assert(!cg->is_late_inline() && cg->is_inline(), "we're doing late inlining"); + if (cg != NULL && cg->is_inline()) { + assert(!cg->is_late_inline(), "we're doing late inlining"); _inline_cg = cg; Compile::current()->dec_number_of_mh_late_inlines(); return true; @@ -807,8 +829,7 @@ const int vtable_index = Method::invalid_vtable_index; CallGenerator* cg = C->call_generator(target, vtable_index, false, jvms, true, PROB_ALWAYS, NULL, true, true); assert(cg == NULL || !cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here"); - if (cg != NULL && cg->is_inline()) - return cg; + return cg; } else { const char* msg = "receiver not constant"; if (PrintInlining) C->print_inlining(callee, jvms->depth() - 1, jvms->bci(), msg); @@ -829,7 +850,7 @@ const TypeOopPtr* oop_ptr = member_name->bottom_type()->is_oopptr(); ciMethod* target = oop_ptr->const_oop()->as_member_name()->get_vmtarget(); - // In lamda forms we erase signature types to avoid resolving issues + // In lambda forms we erase signature types to avoid resolving issues // involving class loaders. When we optimize a method handle invoke // to a direct call we must cast the receiver and arguments to its // actual types. @@ -882,10 +903,9 @@ // provide us with a type speculative_receiver_type = (receiver_type != NULL) ? receiver_type->speculative_type() : NULL; } - CallGenerator* cg = C->call_generator(target, vtable_index, call_does_dispatch, jvms, true, PROB_ALWAYS, speculative_receiver_type, true, true); + CallGenerator* cg = C->call_generator(target, vtable_index, call_does_dispatch, jvms, /*allow_inline=*/true, PROB_ALWAYS, speculative_receiver_type, true, true); assert(cg == NULL || !cg->is_late_inline() || cg->is_mh_late_inline(), "no late inline here"); - if (cg != NULL && cg->is_inline()) - return cg; + return cg; } else { const char* msg = "member_name not constant"; if (PrintInlining) C->print_inlining(callee, jvms->depth() - 1, jvms->bci(), msg); diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/opto/callGenerator.hpp --- a/hotspot/src/share/vm/opto/callGenerator.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/opto/callGenerator.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -49,7 +49,7 @@ public: // Accessors - ciMethod* method() const { return _method; } + ciMethod* method() const { return _method; } // is_inline: At least some code implementing the method is copied here. 
virtual bool is_inline() const { return false; } @@ -123,7 +123,6 @@ // How to generate vanilla out-of-line call sites: static CallGenerator* for_direct_call(ciMethod* m, bool separate_io_projs = false); // static, special static CallGenerator* for_virtual_call(ciMethod* m, int vtable_index); // virtual, interface - static CallGenerator* for_dynamic_call(ciMethod* m); // invokedynamic static CallGenerator* for_method_handle_call( JVMState* jvms, ciMethod* caller, ciMethod* callee, bool delayed_forbidden); static CallGenerator* for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool& input_not_const); @@ -170,6 +169,8 @@ C->print_inlining(callee, inline_level, bci, msg); } } + + static bool is_inlined_mh_linker(JVMState* jvms, ciMethod* m); }; diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/opto/callnode.cpp --- a/hotspot/src/share/vm/opto/callnode.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/opto/callnode.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -959,7 +959,8 @@ uint CallJavaNode::size_of() const { return sizeof(*this); } uint CallJavaNode::cmp( const Node &n ) const { CallJavaNode &call = (CallJavaNode&)n; - return CallNode::cmp(call) && _method == call._method; + return CallNode::cmp(call) && _method == call._method && + _override_symbolic_info == call._override_symbolic_info; } #ifndef PRODUCT void CallJavaNode::dump_spec(outputStream *st) const { diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/opto/callnode.hpp --- a/hotspot/src/share/vm/opto/callnode.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/opto/callnode.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -657,25 +657,29 @@ bool _optimized_virtual; bool _method_handle_invoke; - ciMethod* _method; // Method being direct called + bool _override_symbolic_info; // Override symbolic call site info from bytecode + ciMethod* _method; // Method being direct called public: const int _bci; // Byte Code Index of call byte code CallJavaNode(const TypeFunc* tf , address addr, ciMethod* method, int bci) : CallNode(tf, addr, TypePtr::BOTTOM), _method(method), _bci(bci), _optimized_virtual(false), - _method_handle_invoke(false) + _method_handle_invoke(false), + _override_symbolic_info(false) { init_class_id(Class_CallJava); } virtual int Opcode() const; - ciMethod* method() const { return _method; } - void set_method(ciMethod *m) { _method = m; } - void set_optimized_virtual(bool f) { _optimized_virtual = f; } - bool is_optimized_virtual() const { return _optimized_virtual; } - void set_method_handle_invoke(bool f) { _method_handle_invoke = f; } - bool is_method_handle_invoke() const { return _method_handle_invoke; } + ciMethod* method() const { return _method; } + void set_method(ciMethod *m) { _method = m; } + void set_optimized_virtual(bool f) { _optimized_virtual = f; } + bool is_optimized_virtual() const { return _optimized_virtual; } + void set_method_handle_invoke(bool f) { _method_handle_invoke = f; } + bool is_method_handle_invoke() const { return _method_handle_invoke; } + void set_override_symbolic_info(bool f) { _override_symbolic_info = f; } + bool override_symbolic_info() const { return _override_symbolic_info; } #ifndef PRODUCT virtual void dump_spec(outputStream *st) const; diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/opto/doCall.cpp --- a/hotspot/src/share/vm/opto/doCall.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/opto/doCall.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -393,6 +393,100 @@ return false; } +#ifdef ASSERT +static bool 
check_type(ciType* t1, ciType* t2) { + // Either oop-oop or prim-prim pair. + if (t1->is_primitive_type() && t2->is_primitive_type()) { + return t1->size() == t2->size(); // argument sizes should match + } else { + return !t1->is_primitive_type() && !t2->is_primitive_type(); // oop-oop + } +} + +static bool check_inlined_mh_linker_info(ciMethod* symbolic_info, ciMethod* resolved_method) { + assert(symbolic_info->is_method_handle_intrinsic(), "sanity"); + assert(!resolved_method->is_method_handle_intrinsic(), "sanity"); + + if (!symbolic_info->is_loaded() || !resolved_method->is_loaded()) { + return true; // Don't compare unloaded methods. + } + // Linkers have appendix argument which is not passed to callee. + int has_appendix = MethodHandles::has_member_arg(symbolic_info->intrinsic_id()) ? 1 : 0; + if (symbolic_info->arg_size() != (resolved_method->arg_size() + has_appendix)) { + return false; // Total size of arguments on stack mismatch. + } + if (!check_type(symbolic_info->return_type(), resolved_method->return_type())) { + return false; // Return value size or type mismatch encountered. + } + + switch (symbolic_info->intrinsic_id()) { + case vmIntrinsics::_linkToVirtual: + case vmIntrinsics::_linkToInterface: + case vmIntrinsics::_linkToSpecial: { + if (resolved_method->is_static()) return false; + break; + } + case vmIntrinsics::_linkToStatic: { + if (!resolved_method->is_static()) return false; + break; + } + } + + ciSignature* symbolic_sig = symbolic_info->signature(); + ciSignature* resolved_sig = resolved_method->signature(); + + if (symbolic_sig->count() + (symbolic_info->is_static() ? 0 : 1) != + resolved_sig->count() + (resolved_method->is_static() ? 0 : 1) + has_appendix) { + return false; // Argument count mismatch + } + + int sbase = 0, rbase = 0; + int arg_count = MIN2(symbolic_sig->count() - has_appendix, resolved_sig->count()); + ciType* recv_type = NULL; + if (symbolic_info->is_static() && !resolved_method->is_static()) { + recv_type = symbolic_sig->type_at(0); + sbase = 1; + } else if (!symbolic_info->is_static() && resolved_method->is_static()) { + recv_type = resolved_sig->type_at(0); + rbase = 1; + } + if (recv_type != NULL && recv_type->is_primitive_type()) { + return false; // Receiver should be an oop. + } + for (int i = 0; i < arg_count; i++) { + if (!check_type(symbolic_sig->type_at(sbase + i), resolved_sig->type_at(rbase + i))) { + return false; // Argument size or type mismatch encountered. + } + } + return true; +} + +static bool is_call_consistent_with_jvms(JVMState* jvms, CallGenerator* cg) { + ciMethod* symbolic_info = jvms->method()->get_method_at_bci(jvms->bci()); + ciMethod* resolved_method = cg->method(); + + if (CallGenerator::is_inlined_mh_linker(jvms, resolved_method)) { + return check_inlined_mh_linker_info(symbolic_info, resolved_method); + } else { + // Method name & descriptor should stay the same. 
+ return (symbolic_info->get_Method()->name() == resolved_method->get_Method()->name()) && + (symbolic_info->get_Method()->signature() == resolved_method->get_Method()->signature()); + } +} + +static bool check_call_consistency(JVMState* jvms, CallGenerator* cg) { + if (!is_call_consistent_with_jvms(jvms, cg)) { + tty->print_cr("JVMS:"); + jvms->dump(); + tty->print_cr("Bytecode info:"); + jvms->method()->get_method_at_bci(jvms->bci())->print(); tty->cr(); + tty->print_cr("Resolved method:"); + cg->method()->print(); tty->cr(); + return false; + } + return true; +} +#endif // ASSERT //------------------------------do_call---------------------------------------- // Handle your basic call. Inline if we can & want to, else just setup call. @@ -571,6 +665,8 @@ set_jvms(new_jvms); } + assert(check_call_consistency(jvms, cg), "inconsistent info"); + if (!stopped()) { // This was some sort of virtual call, which did a null check for us. // Now we can assert receiver-not-null, on the normal return path. diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/opto/library_call.cpp --- a/hotspot/src/share/vm/opto/library_call.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/opto/library_call.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -315,6 +315,8 @@ bool inline_profileBoolean(); bool inline_isCompileConstant(); + + bool inline_deoptimize(); }; //---------------------------make_vm_intrinsic---------------------------- @@ -750,6 +752,9 @@ case vmIntrinsics::_hasNegatives: return inline_hasNegatives(); + case vmIntrinsics::_deoptimize: + return inline_deoptimize(); + default: // If you get here, it may be that someone has added a new intrinsic // to the list in vmSymbols.hpp without implementing it here. @@ -6574,3 +6579,12 @@ set_result(n->is_Con() ? 
intcon(1) : intcon(0)); return true; } + +bool LibraryCallKit::inline_deoptimize() { + assert(WhiteBoxAPI, ""); + PreserveReexecuteState preexecs(this); + jvms()->set_should_reexecute(false); + uncommon_trap(Deoptimization::Reason_intrinsic, + Deoptimization::Action_none); + return true; +} diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/opto/machnode.cpp --- a/hotspot/src/share/vm/opto/machnode.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/opto/machnode.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -707,7 +707,8 @@ uint MachCallJavaNode::size_of() const { return sizeof(*this); } uint MachCallJavaNode::cmp( const Node &n ) const { MachCallJavaNode &call = (MachCallJavaNode&)n; - return MachCallNode::cmp(call) && _method->equals(call._method); + return MachCallNode::cmp(call) && _method->equals(call._method) && + _override_symbolic_info == call._override_symbolic_info; } #ifndef PRODUCT void MachCallJavaNode::dump_spec(outputStream *st) const { diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/opto/machnode.hpp --- a/hotspot/src/share/vm/opto/machnode.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/opto/machnode.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -885,16 +885,28 @@ virtual uint cmp( const Node &n ) const; virtual uint size_of() const; // Size is bigger public: - ciMethod* _method; // Method being direct called - int _bci; // Byte Code index of call byte code - bool _optimized_virtual; // Tells if node is a static call or an optimized virtual - bool _method_handle_invoke; // Tells if the call has to preserve SP - MachCallJavaNode() : MachCallNode() { + ciMethod* _method; // Method being direct called + bool _override_symbolic_info; // Override symbolic call site info from bytecode + int _bci; // Byte Code index of call byte code + bool _optimized_virtual; // Tells if node is a static call or an optimized virtual + bool _method_handle_invoke; // Tells if the call has to preserve SP + MachCallJavaNode() : MachCallNode(), _override_symbolic_info(false) { init_class_id(Class_MachCallJava); } virtual const RegMask &in_RegMask(uint) const; + int resolved_method_index(CodeBuffer &cbuf) const { + if (_override_symbolic_info) { + // Attach corresponding Method* to the call site, so VM can use it during resolution + // instead of querying symbolic info from bytecode. + assert(_method != NULL, "method should be set"); + assert(_method->constant_encoding()->is_method(), "should point to a Method"); + return cbuf.oop_recorder()->find_index(_method->constant_encoding()); + } + return 0; // Use symbolic info from bytecode (resolved_method == NULL). 
+ } + #ifndef PRODUCT virtual void dump_spec(outputStream *st) const; #endif diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/opto/matcher.cpp --- a/hotspot/src/share/vm/opto/matcher.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/opto/matcher.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -1201,6 +1201,7 @@ mcall_java->_optimized_virtual = call_java->is_optimized_virtual(); is_method_handle_invoke = call_java->is_method_handle_invoke(); mcall_java->_method_handle_invoke = is_method_handle_invoke; + mcall_java->_override_symbolic_info = call_java->override_symbolic_info(); if (is_method_handle_invoke) { C->set_has_method_handle_invokes(true); } diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/prims/methodHandles.cpp --- a/hotspot/src/share/vm/prims/methodHandles.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/prims/methodHandles.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -358,6 +358,19 @@ return 0; } +Bytecodes::Code MethodHandles::signature_polymorphic_intrinsic_bytecode(vmIntrinsics::ID id) { + switch(id) { + case vmIntrinsics::_linkToVirtual: return Bytecodes::_invokevirtual; + case vmIntrinsics::_linkToInterface: return Bytecodes::_invokeinterface; + case vmIntrinsics::_linkToStatic: return Bytecodes::_invokestatic; + case vmIntrinsics::_linkToSpecial: return Bytecodes::_invokespecial; + case vmIntrinsics::_invokeBasic: return Bytecodes::_invokehandle; + default: + fatal("unexpected id: (%d) %s", (uint)id, vmIntrinsics::name_at(id)); + return Bytecodes::_illegal; + } +} + int MethodHandles::signature_polymorphic_intrinsic_ref_kind(vmIntrinsics::ID iid) { switch (iid) { case vmIntrinsics::_invokeBasic: return 0; diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/prims/methodHandles.hpp --- a/hotspot/src/share/vm/prims/methodHandles.hpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/prims/methodHandles.hpp Fri Dec 04 23:46:19 2015 +0300 @@ -91,6 +91,10 @@ iid <= vmIntrinsics::LAST_MH_SIG_POLY); } + static bool is_signature_polymorphic_method(Method* m) { + return is_signature_polymorphic(m->intrinsic_id()); + } + static bool is_signature_polymorphic_intrinsic(vmIntrinsics::ID iid) { assert(is_signature_polymorphic(iid), ""); // Most sig-poly methods are intrinsics which do not require an @@ -131,6 +135,8 @@ return signature_polymorphic_name_id(klass, name) != vmIntrinsics::_none; } + static Bytecodes::Code signature_polymorphic_intrinsic_bytecode(vmIntrinsics::ID id); + static int get_named_constant(int which, Handle name_box, TRAPS); public: diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/prims/whitebox.cpp --- a/hotspot/src/share/vm/prims/whitebox.cpp Fri Dec 04 16:38:04 2015 +0100 +++ b/hotspot/src/share/vm/prims/whitebox.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -1290,6 +1290,11 @@ return (jlong) ikh->constants(); WB_END +WB_ENTRY(void, WB_ClearInlineCaches(JNIEnv* env, jobject wb)) + VM_ClearICs clear_ics; + VMThread::execute(&clear_ics); +WB_END + template <typename T> static bool GetMethodOption(JavaThread* thread, JNIEnv* env, jobject method, jstring name, T* value) { assert(value != NULL, "sanity"); @@ -1615,6 +1620,7 @@ (void*)&WB_GetMethodStringOption}, {CC"isShared", CC"(Ljava/lang/Object;)Z", (void*)&WB_IsShared }, {CC"areSharedStringsIgnored", CC"()Z", (void*)&WB_AreSharedStringsIgnored }, + {CC"clearInlineCaches", CC"()V", (void*)&WB_ClearInlineCaches }, }; #undef CC diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/runtime/sharedRuntime.cpp --- a/hotspot/src/share/vm/runtime/sharedRuntime.cpp Fri Dec 04 16:38:04 2015
+0100 +++ b/hotspot/src/share/vm/runtime/sharedRuntime.cpp Fri Dec 04 23:46:19 2015 +0300 @@ -1070,6 +1070,21 @@ return find_callee_info_helper(thread, vfst, bc, callinfo, THREAD); } +methodHandle SharedRuntime::extract_attached_method(vframeStream& vfst) { + nmethod* caller_nm = vfst.nm(); + + nmethodLocker caller_lock(caller_nm); + + address pc = vfst.frame_pc(); + { // Get call instruction under lock because another thread may be busy patching it. + MutexLockerEx ml_patch(Patching_lock, Mutex::_no_safepoint_check_flag); + if (NativeCall::is_call_before(pc)) { + NativeCall* ncall = nativeCall_before(pc); + return caller_nm->attached_method(ncall->instruction_address()); + } + } + return NULL; +} // Finds receiver, CallInfo (i.e. receiver method), and calling bytecode // for a call current in progress, i.e., arguments has been pushed on stack @@ -1087,15 +1102,37 @@ methodHandle caller(THREAD, vfst.method()); int bci = vfst.bci(); - // Find bytecode Bytecode_invoke bytecode(caller, bci); - bc = bytecode.invoke_code(); int bytecode_index = bytecode.index(); + methodHandle attached_method = extract_attached_method(vfst); + if (attached_method.not_null()) { + methodHandle callee = bytecode.static_target(CHECK_NH); + vmIntrinsics::ID id = callee->intrinsic_id(); + // When VM replaces MH.invokeBasic/linkTo* call with a direct/virtual call, + // it attaches statically resolved method to the call site. + if (MethodHandles::is_signature_polymorphic(id) && + MethodHandles::is_signature_polymorphic_intrinsic(id)) { + bc = MethodHandles::signature_polymorphic_intrinsic_bytecode(id); + + // Need to adjust invokehandle since inlining through signature-polymorphic + // method happened. + if (bc == Bytecodes::_invokehandle && + !MethodHandles::is_signature_polymorphic_method(attached_method())) { + bc = attached_method->is_static() ? Bytecodes::_invokestatic + : Bytecodes::_invokevirtual; + } + } + } else { + bc = bytecode.invoke_code(); + } + + bool has_receiver = bc != Bytecodes::_invokestatic && + bc != Bytecodes::_invokedynamic && + bc != Bytecodes::_invokehandle; + // Find receiver for non-static call - if (bc != Bytecodes::_invokestatic && - bc != Bytecodes::_invokedynamic && - bc != Bytecodes::_invokehandle) { + if (has_receiver) { // This register map must be update since we need to find the receiver for // compiled frames. The receiver might be in a register. RegisterMap reg_map2(thread); @@ -1103,10 +1140,13 @@ // Caller-frame is a compiled frame frame callerFrame = stubFrame.sender(&reg_map2); - methodHandle callee = bytecode.static_target(CHECK_(nullHandle)); - if (callee.is_null()) { - THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle); + if (attached_method.is_null()) { + methodHandle callee = bytecode.static_target(CHECK_NH); + if (callee.is_null()) { + THROW_(vmSymbols::java_lang_NoSuchMethodException(), nullHandle); + } } + // Retrieve from a compiled argument list receiver = Handle(THREAD, callerFrame.retrieve_receiver(&reg_map2)); @@ -1115,26 +1155,35 @@ } } - // Resolve method. This is parameterized by bytecode. - constantPoolHandle constants(THREAD, caller->constants()); assert(receiver.is_null() || receiver->is_oop(), "wrong receiver"); - LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_(nullHandle)); + + // Resolve method + if (attached_method.not_null()) { + // Parameterized by attached method. + LinkResolver::resolve_invoke(callinfo, receiver, attached_method, bc, CHECK_NH); + } else { + // Parameterized by bytecode.
+    constantPoolHandle constants(THREAD, caller->constants());
+    LinkResolver::resolve_invoke(callinfo, receiver, constants, bytecode_index, bc, CHECK_NH);
+  }
 
 #ifdef ASSERT
   // Check that the receiver klass is of the right subtype and that it is initialized for virtual calls
-  if (bc != Bytecodes::_invokestatic && bc != Bytecodes::_invokedynamic && bc != Bytecodes::_invokehandle) {
+  if (has_receiver) {
     assert(receiver.not_null(), "should have thrown exception");
     KlassHandle receiver_klass(THREAD, receiver->klass());
-    Klass* rk = constants->klass_ref_at(bytecode_index, CHECK_(nullHandle));
-    // klass is already loaded
+    Klass* rk = NULL;
+    if (attached_method.not_null()) {
+      // In case there's resolved method attached, use its holder during the check.
+      rk = attached_method->method_holder();
+    } else {
+      // Klass is already loaded.
+      constantPoolHandle constants(THREAD, caller->constants());
+      rk = constants->klass_ref_at(bytecode_index, CHECK_NH);
+    }
     KlassHandle static_receiver_klass(THREAD, rk);
-    // Method handle invokes might have been optimized to a direct call
-    // so don't check for the receiver class.
-    // FIXME this weakens the assert too much
     methodHandle callee = callinfo.selected_method();
-    assert(receiver_klass->is_subtype_of(static_receiver_klass()) ||
-           callee->is_method_handle_intrinsic() ||
-           callee->is_compiled_lambda_form(),
+    assert(receiver_klass->is_subtype_of(static_receiver_klass()),
            "actual receiver must be subclass of static receiver klass");
     if (receiver_klass->is_instance_klass()) {
       if (InstanceKlass::cast(receiver_klass())->is_not_initialized()) {
@@ -1670,7 +1719,6 @@
       inline_cache->set_to_clean();
     }
   }
-  }
 
   methodHandle callee_method = find_callee_method(thread, CHECK_(methodHandle()));
 
diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/runtime/sharedRuntime.hpp
--- a/hotspot/src/share/vm/runtime/sharedRuntime.hpp Fri Dec 04 16:38:04 2015 +0100
+++ b/hotspot/src/share/vm/runtime/sharedRuntime.hpp Fri Dec 04 23:46:19 2015 +0300
@@ -343,6 +343,8 @@
                                           Bytecodes::Code& bc,
                                           CallInfo& callinfo, TRAPS);
 
+  static methodHandle extract_attached_method(vframeStream& vfst);
+
   static address clean_virtual_call_entry();
   static address clean_opt_virtual_call_entry();
   static address clean_static_call_entry();
diff -r 839c8ba29724 -r bbf32241d851 hotspot/src/share/vm/runtime/vm_operations.hpp
--- a/hotspot/src/share/vm/runtime/vm_operations.hpp Fri Dec 04 16:38:04 2015 +0100
+++ b/hotspot/src/share/vm/runtime/vm_operations.hpp Fri Dec 04 23:46:19 2015 +0300
@@ -30,6 +30,7 @@
 #include "oops/oop.hpp"
 #include "runtime/thread.hpp"
 #include "utilities/top.hpp"
+#include "code/codeCache.hpp"
 
 // The following classes are used for operations
 // initiated by a Java thread but that must
@@ -44,6 +45,7 @@
   template(ThreadDump)                            \
   template(PrintThreads)                          \
   template(FindDeadlocks)                         \
+  template(ClearICs)                              \
   template(ForceSafepoint)                        \
   template(ForceAsyncSafepoint)                   \
   template(Deoptimize)                            \
@@ -230,6 +232,13 @@
   }
 };
 
+class VM_ClearICs: public VM_Operation {
+ public:
+  VM_ClearICs() {}
+  void doit() { CodeCache::clear_inline_caches(); }
+  VMOp_Type type() const { return VMOp_ClearICs; }
+};
+
 // dummy vm op, evaluated just to force a safepoint
 class VM_ForceSafepoint: public VM_Operation {
  public:
diff -r 839c8ba29724 -r bbf32241d851 hotspot/test/compiler/jsr292/NonInlinedCall/Agent.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/compiler/jsr292/NonInlinedCall/Agent.java Fri Dec 04 23:46:19 2015 +0300
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+import java.io.File;
+import java.io.PrintStream;
+import java.lang.instrument.Instrumentation;
+import java.util.Arrays;
+
+public class Agent {
+    public static void main(String[] args) throws Exception {
+        String jarName = args[0];
+        String className = args[1];
+        String manifestName = "manifest.mf";
+
+        System.out.println("Creating "+manifestName);
+        try (PrintStream out = new PrintStream(new File(manifestName))) {
+            out.println("Premain-Class: " + className);
+            out.println("Can-Redefine-Classes: true");
+        }
+        System.out.println("Building "+jarName);
+        String[] jarArgs = new String[] {"-cfm", jarName, manifestName };
+
+        System.out.println("Running jar " + Arrays.toString(jarArgs));
+        sun.tools.jar.Main jarTool = new sun.tools.jar.Main(System.out, System.err, "jar");
+        if (!jarTool.run(jarArgs)) {
+            throw new Error("jar failed: args=" + Arrays.toString(args));
+        }
+    }
+}
diff -r 839c8ba29724 -r bbf32241d851 hotspot/test/compiler/jsr292/NonInlinedCall/GCTest.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/compiler/jsr292/NonInlinedCall/GCTest.java Fri Dec 04 23:46:19 2015 +0300
@@ -0,0 +1,105 @@
+/*
+ * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8072008
+ * @library /testlibrary /../../test/lib
+ * @build GCTest NonInlinedReinvoker
+ * @run main ClassFileInstaller sun.hotspot.WhiteBox
+ *                              sun.hotspot.WhiteBox$WhiteBoxPermission
+ *                              java.lang.invoke.GCTest
+ *                              java.lang.invoke.GCTest$T
+ *                              java.lang.invoke.NonInlinedReinvoker
+ *                              jdk.test.lib.Asserts
+ * @run main/othervm -Xbootclasspath/a:. -XX:+IgnoreUnrecognizedVMOptions
+ *                   -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI
+ *                   -Xbatch -XX:-TieredCompilation -XX:CICompilerCount=1
+ *                   java.lang.invoke.GCTest
+ */
+package java.lang.invoke;
+
+import sun.hotspot.WhiteBox;
+
+import java.lang.ref.*;
+import static jdk.test.lib.Asserts.*;
+
+public class GCTest {
+    static final MethodHandles.Lookup LOOKUP = MethodHandles.Lookup.IMPL_LOOKUP;
+
+    static class T {
+        static int f1() { return 0; }
+        static int f2() { return 1; }
+    }
+
+    static @Stable MethodHandle mh;
+    static PhantomReference<LambdaForm> lform;
+
+    static final ReferenceQueue<LambdaForm> rq = new ReferenceQueue<>();
+    static final WhiteBox WB = WhiteBox.getWhiteBox();
+
+    @DontInline
+    static int invokeBasic() {
+        try {
+            return (int) mh.invokeBasic();
+        } catch (Throwable e) {
+            throw new Error(e);
+        }
+    }
+
+    static void test(int expected) {
+        for (int i = 0; i < 20_000; i++) {
+            invokeBasic();
+        }
+        assertEquals(invokeBasic(), expected);
+    }
+
+    public static void main(String[] args) throws Exception {
+        mh = NonInlinedReinvoker.make(
+                LOOKUP.findStatic(T.class, "f1", MethodType.methodType(int.class)));
+
+        // Monitor LambdaForm GC
+        lform = new PhantomReference<>(mh.form, rq);
+
+        test(0);
+        WB.clearInlineCaches();
+        test(0);
+
+        mh = NonInlinedReinvoker.make(
+                LOOKUP.findStatic(T.class, "f2", MethodType.methodType(int.class)));
+
+        Reference<?> ref = null;
+        while (ref == null) {
+            WB.fullGC();
+            try {
+                ref = rq.remove(1000);
+            } catch (InterruptedException e) { /*ignore*/ }
+        }
+
+        test(1);
+        WB.clearInlineCaches();
+        test(1);
+
+        System.out.println("TEST PASSED");
+    }
+}
diff -r 839c8ba29724 -r bbf32241d851 hotspot/test/compiler/jsr292/NonInlinedCall/InvokeTest.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/compiler/jsr292/NonInlinedCall/InvokeTest.java Fri Dec 04 23:46:19 2015 +0300
@@ -0,0 +1,218 @@
+/*
+ * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8072008
+ * @library /testlibrary /../../test/lib
+ * @build InvokeTest NonInlinedReinvoker
+ * @run main ClassFileInstaller sun.hotspot.WhiteBox
+ *                              sun.hotspot.WhiteBox$WhiteBoxPermission
+ *                              java.lang.invoke.InvokeTest
+ *                              java.lang.invoke.InvokeTest$T
+ *                              java.lang.invoke.InvokeTest$P1
+ *                              java.lang.invoke.InvokeTest$P2
+ *                              java.lang.invoke.InvokeTest$I
+ *                              java.lang.invoke.NonInlinedReinvoker
+ *                              jdk.test.lib.Asserts
+ * @run main/othervm -Xbootclasspath/a:. -XX:+IgnoreUnrecognizedVMOptions
+ *                   -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI
+ *                   -Xbatch -XX:-TieredCompilation -XX:CICompilerCount=1
+ *                   java.lang.invoke.InvokeTest
+ */
+package java.lang.invoke;
+
+import sun.hotspot.WhiteBox;
+import static jdk.test.lib.Asserts.*;
+
+public class InvokeTest {
+    static MethodHandles.Lookup LOOKUP = MethodHandles.Lookup.IMPL_LOOKUP;
+
+    static final MethodHandle virtualMH;  // invokevirtual T.f1
+    static final MethodHandle staticMH;   // invokestatic  T.f2
+    static final MethodHandle intfMH;     // invokeinterface I.f1
+    static final MethodHandle specialMH;  // invokespecial T.f4 T
+    static final MethodHandle basicMH;
+
+    static final WhiteBox WB = WhiteBox.getWhiteBox();
+
+    static volatile boolean doDeopt = false;
+
+    static {
+        try {
+            MethodType mtype = MethodType.methodType(Class.class);
+
+            virtualMH = LOOKUP.findVirtual(T.class, "f1", mtype);
+            staticMH  = LOOKUP.findStatic (T.class, "f2", mtype);
+            intfMH    = LOOKUP.findVirtual(I.class, "f3", mtype);
+            specialMH = LOOKUP.findSpecial(T.class, "f4", mtype, T.class);
+            basicMH   = NonInlinedReinvoker.make(staticMH);
+        } catch (Exception e) {
+            throw new Error(e);
+        }
+    }
+
+    static class T implements I {
+        @DontInline public        Class<?> f1() { if (doDeopt) WB.deoptimize(); return T.class; }
+        @DontInline public static Class<?> f2() { if (doDeopt) WB.deoptimize(); return T.class; }
+        @DontInline private       Class<?> f4() { if (doDeopt) WB.deoptimize(); return T.class; }
+    }
+
+    static class P1 extends T {
+        @DontInline public Class<?> f1() { if (doDeopt) WB.deoptimize(); return P1.class; }
+        @DontInline public Class<?> f3() { if (doDeopt) WB.deoptimize(); return P1.class; }
+    }
+
+    static class P2 extends T {
+        @DontInline public Class<?> f1() { if (doDeopt) WB.deoptimize(); return P2.class; }
+        @DontInline public Class<?> f3() { if (doDeopt) WB.deoptimize(); return P2.class; }
+    }
+
+    static interface I {
+        @DontInline default Class<?> f3() { if (doDeopt) WB.deoptimize(); return I.class; }
+    }
+
+    @DontInline
+    static void linkToVirtual(Object obj, Class<?> expected) {
+        try {
+            Class<?> cls = (Class<?>)virtualMH.invokeExact((T)obj);
+            assertEquals(cls, obj.getClass());
+        } catch (Throwable e) {
+            throw new Error(e);
+        }
+    }
+
+    @DontInline
+    static void linkToInterface(Object obj, Class<?> expected) {
+        try {
+            Class<?> cls = (Class<?>)intfMH.invokeExact((I)obj);
+            assertEquals(cls, expected);
+        } catch (Throwable e) {
+            throw new Error(e);
+        }
+    }
+
+    @DontInline
+    static void linkToStatic() {
+        try {
+            Class<?> cls = (Class<?>)staticMH.invokeExact();
+            assertEquals(cls, T.class);
+        } catch (Throwable e) {
+            throw new Error(e);
+        }
+    }
+
+    @DontInline
+    static void linkToSpecial(Object obj, Class<?> expected) {
+        try {
+            Class<?> cls = (Class<?>)specialMH.invokeExact((T)obj);
+            assertEquals(cls, expected);
+        } catch (Throwable e) {
+            throw new Error(e);
+        }
+    }
+
+    @DontInline
+    static void invokeBasic() {
+        try {
+            Class<?> cls = (Class<?>)basicMH.invokeBasic();
+            assertEquals(cls, T.class);
+        } catch (Throwable e) {
+            throw new Error(e);
+        }
+    }
+
+    static void run(Runnable r) {
+        for (int i = 0; i < 20_000; i++) {
+            r.run();
+        }
+
+        doDeopt = true;
+        r.run();
+        doDeopt = false;
+
+        WB.clearInlineCaches();
+
+        for (int i = 0; i < 20_000; i++) {
+            r.run();
+        }
+
+        doDeopt = true;
+        r.run();
+        doDeopt = false;
+    }
+
+    static void testVirtual() {
+        System.out.println("linkToVirtual");
+
+        // Monomorphic case (optimized virtual call)
+        run(() -> linkToVirtual(new T(), T.class));
+
+        // Megamorphic case (virtual call)
+        Object[] recv = new Object[] { new T(), new P1(), new P2() };
+        run(() -> {
+            for (Object r : recv) {
+                linkToVirtual(r, r.getClass());
+            }});
+    }
+
+    static void testInterface() {
+        System.out.println("linkToInterface");
+
+        // Monomorphic case (optimized virtual call)
+        run(() -> linkToInterface(new T(), I.class));
+
+        // Megamorphic case (virtual call)
+        Object[][] recv = new Object[][] {{new T(), I.class}, {new P1(), P1.class}, {new P2(), P2.class}};
+        run(() -> {
+            for (Object[] r : recv) {
+                linkToInterface(r[0], (Class<?>)r[1]);
+            }});
+    }
+
+    static void testSpecial() {
+        System.out.println("linkToSpecial");
+        // Monomorphic case (optimized virtual call)
+        run(() -> linkToSpecial(new T(), T.class));
+    }
+
+    static void testStatic() {
+        System.out.println("linkToStatic");
+        // static call
+        run(() -> linkToStatic());
+    }
+
+    static void testBasic() {
+        System.out.println("invokeBasic");
+        // static call
+        run(() -> invokeBasic());
+    }
+
+    public static void main(String[] args) {
+        testVirtual();
+        testInterface();
+        testSpecial();
+        testStatic();
+        testBasic();
+    }
+}
diff -r 839c8ba29724 -r bbf32241d851 hotspot/test/compiler/jsr292/NonInlinedCall/NonInlinedReinvoker.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/compiler/jsr292/NonInlinedCall/NonInlinedReinvoker.java Fri Dec 04 23:46:19 2015 +0300
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package java.lang.invoke;
+
+class NonInlinedReinvoker extends DelegatingMethodHandle {
+    private final MethodHandle target;
+
+    private NonInlinedReinvoker(MethodHandle target, LambdaForm lf) {
+        super(target.type(), lf);
+        this.target = target;
+    }
+    @Override
+    protected MethodHandle getTarget() {
+        return target;
+    }
+
+    @Override
+    MethodHandle asTypeUncached(MethodType newType) {
+        return asTypeCache = target.asType(newType);
+    }
+
+    static MethodHandle make(MethodHandle target) {
+        LambdaForm lform = DelegatingMethodHandle.makeReinvokerForm(
+                target, -1, DelegatingMethodHandle.class, "reinvoker.dontInline",
+                /*forceInline=*/false, DelegatingMethodHandle.NF_getTarget, null);
+        return new NonInlinedReinvoker(target, lform);
+    }
+}
diff -r 839c8ba29724 -r bbf32241d851 hotspot/test/compiler/jsr292/NonInlinedCall/RedefineTest.java
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hotspot/test/compiler/jsr292/NonInlinedCall/RedefineTest.java Fri Dec 04 23:46:19 2015 +0300
@@ -0,0 +1,157 @@
+/*
+ * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8072008
+ * @library /testlibrary /../../test/lib
+ * @build RedefineTest Agent
+ * @run main ClassFileInstaller sun.hotspot.WhiteBox
+ *                              sun.hotspot.WhiteBox$WhiteBoxPermission
+ *                              java.lang.invoke.RedefineTest
+ *                              Agent
+ *                              jdk.test.lib.Asserts
+ * @run main Agent agent.jar java.lang.invoke.RedefineTest
+ * @run main/othervm -Xbootclasspath/a:. -javaagent:agent.jar
+ *                   -XX:+IgnoreUnrecognizedVMOptions
+ *                   -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI
+ *                   -Xbatch -XX:-TieredCompilation -XX:CICompilerCount=1
+ *                   java.lang.invoke.RedefineTest
+ */
+package java.lang.invoke;
+
+import sun.hotspot.WhiteBox;
+import sun.misc.Unsafe;
+
+import jdk.internal.org.objectweb.asm.*;
+
+import java.lang.instrument.ClassDefinition;
+import java.lang.instrument.Instrumentation;
+
+import static jdk.internal.org.objectweb.asm.Opcodes.*;
+
+public class RedefineTest {
+    static final MethodHandles.Lookup LOOKUP = MethodHandles.Lookup.IMPL_LOOKUP;
+    static final Unsafe UNSAFE = Unsafe.getUnsafe();
+
+    static final String NAME = "java/lang/invoke/RedefineTest$T";
+
+    static Class<?> getClass(int r) {
+        byte[] classFile = getClassFile(r);
+        return UNSAFE.defineClass(NAME, classFile, 0, classFile.length, null, null);
+    }
+
+    /**
+     * Generates a class of the following shape:
+     *     static class T {
+     *         @DontInline public static int f() { return $r; }
+     *     }
+     */
+    static byte[] getClassFile(int r) {
+        ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
+        MethodVisitor mv;
+        cw.visit(52, ACC_PUBLIC | ACC_SUPER, NAME, null, "java/lang/Object", null);
+        {
+            mv = cw.visitMethod(ACC_PUBLIC | ACC_STATIC, "f", "()I", null, null);
+            mv.visitAnnotation("Ljava/lang/invoke/DontInline;", true);
+            mv.visitCode();
+            mv.visitLdcInsn(r);
+            mv.visitInsn(IRETURN);
+            mv.visitMaxs(0, 0);
+            mv.visitEnd();
+        }
+        cw.visitEnd();
+        return cw.toByteArray();
+    }
+
+    static final MethodHandle mh;
+    static final Class<?> CLS = getClass(0);
+    static {
+        try {
+            mh = LOOKUP.findStatic(CLS, "f", MethodType.methodType(int.class));
+        } catch (Exception e) {
+            throw new Error(e);
+        }
+    }
+
+    static final WhiteBox WB = WhiteBox.getWhiteBox();
+
+    @DontInline
+    static int invokeBasic() {
+        try {
+            return (int)mh.invokeExact();
+        } catch (Throwable e) {
+            throw new Error(e);
+        }
+    }
+
+    static Instrumentation instr;
+    public static void premain(String args, Instrumentation instr) {
+        RedefineTest.instr = instr;
+    }
+
+
+    public static void main(String[] args) throws Exception {
+        for (int i = 0; i < 20_000; i++) {
+            int r = invokeBasic();
+            if (r != 0) {
+                throw new Error(r + " != 0");
+            }
+        }
+        // WB.ensureCompiled();
+
+        redefine();
+
+        int exp = (instr != null) ? 1 : 0;
+
+        for (int i = 0; i < 20_000; i++) {
+            if (invokeBasic() != exp) {
+                throw new Error();
+            }
+        }
+
+        WB.clearInlineCaches();
+
+        for (int i = 0; i < 20_000; i++) {
+            if (invokeBasic() != exp) {
+                throw new Error();
+            }
+        }
+
+        // WB.ensureCompiled();
+    }
+
+    static void redefine() {
+        if (instr == null) {
+            System.out.println("NOT REDEFINED");
+            return;
+        }
+        ClassDefinition cd = new ClassDefinition(CLS, getClassFile(1));
+        try {
+            instr.redefineClasses(cd);
+        } catch (Exception e) {
+            throw new Error(e);
+        }
+        System.out.println("REDEFINED");
+    }
+}
diff -r 839c8ba29724 -r bbf32241d851 hotspot/test/sanity/MismatchedWhiteBox/WhiteBox.java
--- a/hotspot/test/sanity/MismatchedWhiteBox/WhiteBox.java Fri Dec 04 16:38:04 2015 +0100
+++ b/hotspot/test/sanity/MismatchedWhiteBox/WhiteBox.java Fri Dec 04 23:46:19 2015 +0300
@@ -29,7 +29,7 @@
  * @library /testlibrary
  * @compile WhiteBox.java
  * @run main ClassFileInstaller sun.hotspot.WhiteBox
- * @run main/othervm -Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI sun.hotspot.WhiteBox
+ * @run main/othervm -Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI -XX:-CheckIntrinsics sun.hotspot.WhiteBox
  */
 
 package sun.hotspot;