8222841: Incorrect static call stub interactions with class unloading
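Summary: Clear stale Method* metadata relocations in static call stubs and add a c2i entry barrier that sends calls with concurrently unloading methods to handle_wrong_method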
Reviewed-by: kvn, coleenp
--- a/src/hotspot/cpu/x86/compiledIC_x86.cpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/cpu/x86/compiledIC_x86.cpp Tue May 14 12:07:24 2019 +0200
@@ -159,10 +159,10 @@
NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
#ifdef ASSERT
- // read the value once
- volatile intptr_t data = method_holder->data();
- volatile address destination = jump->jump_destination();
- assert(data == 0 || data == (intptr_t)callee(),
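+  // The Method* in the stub may belong to a class whose loader is concurrently
+  // unloading; the relaxed assert below tolerates such a stale value.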
+ Method* old_method = reinterpret_cast<Method*>(method_holder->data());
+ address destination = jump->jump_destination();
+ assert(old_method == NULL || old_method == callee() ||
+ !old_method->method_holder()->is_loader_alive(),
"a) MT-unsafe modification of inline cache");
assert(destination == (address)-1 || destination == entry,
"b) MT-unsafe modification of inline cache");
--- a/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.cpp Tue May 14 12:07:24 2019 +0200
@@ -30,6 +30,7 @@
#include "interpreter/interp_masm.hpp"
#include "memory/universe.hpp"
#include "runtime/jniHandles.hpp"
+#include "runtime/sharedRuntime.hpp"
#include "runtime/thread.hpp"
#define __ masm->
@@ -344,3 +345,33 @@
__ bind(continuation);
#endif
}
+
+void BarrierSetAssembler::c2i_entry_barrier(MacroAssembler* masm) {
+ BarrierSetNMethod* bs = BarrierSet::barrier_set()->barrier_set_nmethod();
+ if (bs == NULL) {
+ return;
+ }
+
+ Label bad_call;
+ __ cmpptr(rbx, 0); // rbx contains the incoming method for c2i adapters.
+ __ jcc(Assembler::equal, bad_call);
+
+  // Pointer chase to the method holder to find out whether the method's class is concurrently unloading.
+ Label method_live;
+ __ load_method_holder_cld(rscratch1, rbx);
+
+ // Is it a strong CLD?
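+  // (_keep_alive is a reference count; a positive value keeps the CLD alive
+  // regardless of its holder, e.g. while classes are still being loaded into it.)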
+ __ movl(rscratch2, Address(rscratch1, ClassLoaderData::keep_alive_offset()));
+ __ cmpptr(rscratch2, 0);
+ __ jcc(Assembler::greater, method_live);
+
+ // Is it a weak but alive CLD?
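+  // The CLD holder is a WeakHandle to the class loader oop; once the loader
+  // dies it resolves to null, and the method must not be entered.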
+ __ movptr(rscratch1, Address(rscratch1, ClassLoaderData::holder_offset()));
+ __ resolve_weak_handle(rscratch1, rscratch2);
+ __ cmpptr(rscratch1, 0);
+ __ jcc(Assembler::notEqual, method_live);
+
+ __ bind(bad_call);
+ __ jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub()));
+ __ bind(method_live);
+}
--- a/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.hpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/cpu/x86/gc/shared/barrierSetAssembler_x86.hpp Tue May 14 12:07:24 2019 +0200
@@ -85,6 +85,7 @@
virtual void barrier_stubs_init() {}
virtual void nmethod_entry_barrier(MacroAssembler* masm);
+ virtual void c2i_entry_barrier(MacroAssembler* masm);
};
#endif // CPU_X86_GC_SHARED_BARRIERSETASSEMBLER_X86_HPP
--- a/src/hotspot/cpu/x86/macroAssembler_x86.cpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/cpu/x86/macroAssembler_x86.cpp Tue May 14 12:07:24 2019 +0200
@@ -5175,6 +5175,23 @@
result, Address(result, 0), tmp, /*tmp_thread*/noreg);
}
+// ((WeakHandle)result).resolve();
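+// Resolves to null when the handle itself is null or when the referent has
+// been cleared, which callers use to detect dead class loaders.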
+void MacroAssembler::resolve_weak_handle(Register rresult, Register rtmp) {
+ assert_different_registers(rresult, rtmp);
+ Label resolved;
+
+ // A null weak handle resolves to null.
+ cmpptr(rresult, 0);
+ jcc(Assembler::equal, resolved);
+
+  // Only 64-bit platforms support GCs that require a tmp register.
+  // Only IN_HEAP loads require a thread_tmp register.
+ // WeakHandle::resolve is an indirection like jweak.
+ access_load_at(T_OBJECT, IN_NATIVE | ON_PHANTOM_OOP_REF,
+ rresult, Address(rresult, 0), rtmp, /*tmp_thread*/noreg);
+ bind(resolved);
+}
+
void MacroAssembler::load_mirror(Register mirror, Register method, Register tmp) {
// get mirror
const int mirror_offset = in_bytes(Klass::java_mirror_offset());
@@ -5185,6 +5202,13 @@
resolve_oop_handle(mirror, tmp);
}
+void MacroAssembler::load_method_holder_cld(Register rresult, Register rmethod) {
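+  // Chase rmethod: Method* -> ConstMethod* -> ConstantPool* -> InstanceKlass* -> ClassLoaderData*.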
+ movptr(rresult, Address(rmethod, Method::const_offset()));
+ movptr(rresult, Address(rresult, ConstMethod::constants_offset()));
+ movptr(rresult, Address(rresult, ConstantPool::pool_holder_offset_in_bytes()));
+ movptr(rresult, Address(rresult, InstanceKlass::class_loader_data_offset()));
+}
+
void MacroAssembler::load_klass(Register dst, Register src) {
#ifdef _LP64
if (UseCompressedClassPointers) {
--- a/src/hotspot/cpu/x86/macroAssembler_x86.hpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/cpu/x86/macroAssembler_x86.hpp Tue May 14 12:07:24 2019 +0200
@@ -313,7 +313,9 @@
void testbool(Register dst);
void resolve_oop_handle(Register result, Register tmp = rscratch2);
+ void resolve_weak_handle(Register result, Register tmp);
void load_mirror(Register mirror, Register method, Register tmp = rscratch2);
+ void load_method_holder_cld(Register rresult, Register rmethod);
// oop manipulations
void load_klass(Register dst, Register src);
--- a/src/hotspot/cpu/x86/sharedRuntime_x86_64.cpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/cpu/x86/sharedRuntime_x86_64.cpp Tue May 14 12:07:24 2019 +0200
@@ -971,6 +971,9 @@
address c2i_entry = __ pc();
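+  // Guard the c2i path: entering the adapter with a Method* whose class is
+  // concurrently unloading is unsafe, so the barrier diverts such calls to
+  // the handle_wrong_method stub for re-resolution.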
+ BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
+ bs->c2i_entry_barrier(masm);
+
gen_c2i_adapter(masm, total_args_passed, comp_args_on_stack, sig_bt, regs, skip_fixup);
__ flush();
--- a/src/hotspot/share/aot/aotCompiledMethod.cpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/share/aot/aotCompiledMethod.cpp Tue May 14 12:07:24 2019 +0200
@@ -278,7 +278,7 @@
}
}
} else if (iter.type() == relocInfo::static_call_type ||
- iter.type() == relocInfo::opt_virtual_call_type){
+ iter.type() == relocInfo::opt_virtual_call_type) {
// Check Method* in AOT c2i stub for other calls.
Metadata* meta = (Metadata*)nativeLoadGot_at(nativePltCall_at(iter.addr())->plt_c2i_stub())->data();
if (meta != NULL) {
--- a/src/hotspot/share/classfile/classLoaderData.hpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/share/classfile/classLoaderData.hpp Tue May 14 12:07:24 2019 +0200
@@ -300,6 +300,10 @@
ModuleEntryTable* modules();
bool modules_defined() { return (_modules != NULL); }
+ // Offsets
+ static ByteSize holder_offset() { return in_ByteSize(offset_of(ClassLoaderData, _holder)); }
+ static ByteSize keep_alive_offset() { return in_ByteSize(offset_of(ClassLoaderData, _keep_alive)); }
+
// Loaded class dictionary
Dictionary* dictionary() const { return _dictionary; }
--- a/src/hotspot/share/code/compiledMethod.cpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/share/code/compiledMethod.cpp Tue May 14 12:07:24 2019 +0200
@@ -468,39 +468,6 @@
return ic->set_to_clean();
}
-// static_stub_Relocations may have dangling references to
-// nmethods so trim them out here. Otherwise it looks like
-// compiled code is maintaining a link to dead metadata.
-void CompiledMethod::clean_ic_stubs() {
-#ifdef ASSERT
- address low_boundary = oops_reloc_begin();
- RelocIterator iter(this, low_boundary);
- while (iter.next()) {
- address static_call_addr = NULL;
- if (iter.type() == relocInfo::opt_virtual_call_type) {
- CompiledIC* cic = CompiledIC_at(&iter);
- if (!cic->is_call_to_interpreted()) {
- static_call_addr = iter.addr();
- }
- } else if (iter.type() == relocInfo::static_call_type) {
- CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
- if (!csc->is_call_to_interpreted()) {
- static_call_addr = iter.addr();
- }
- }
- if (static_call_addr != NULL) {
- RelocIterator sciter(this, low_boundary);
- while (sciter.next()) {
- if (sciter.type() == relocInfo::static_stub_type &&
- sciter.static_stub_reloc()->static_call() == static_call_addr) {
- sciter.static_stub_reloc()->clear_inline_cache();
- }
- }
- }
- }
-#endif
-}
-
// Clean references to unloaded nmethods at addr from this one, which is not unloaded.
template <class CompiledICorStaticCall>
static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, CompiledMethod* from,
@@ -549,9 +516,6 @@
return false;
}
- // All static stubs need to be cleaned.
- clean_ic_stubs();
-
#ifdef ASSERT
// Check that the metadata embedded in the nmethod is alive
CheckClass check_class;
@@ -581,6 +545,7 @@
// Find all calls in an nmethod and clear the ones that point to non-entrant,
// zombie and unloaded nmethods.
RelocIterator iter(this, oops_reloc_begin());
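+  // Track whether the previous relocation was a static stub, so that the
+  // stub's trailing metadata relocation (the embedded Method*) can be found.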
+ bool is_in_static_stub = false;
while(iter.next()) {
switch (iter.type()) {
@@ -611,6 +576,45 @@
}
break;
+ case relocInfo::static_stub_type: {
+ is_in_static_stub = true;
+ break;
+ }
+
+ case relocInfo::metadata_type: {
+      // Only the metadata relocations contained in static/opt virtual call stubs
+      // carry the Method* passed to c2i adapters. They are the only metadata
+      // relocations that need to be walked, as they are the only ones that
+      // violate the invariant that all metadata relocations have an oop
+      // in the compiled method (due to deferred resolution and code patching).
+
+      // This causes dead metadata to remain in compiled methods that are not
+      // unloading. Unless these slippery metadata relocations of the static
+      // stubs are at least cleared, subsequent class redefinition operations
+      // will access potentially freed memory, and JavaThread execution
+      // concurrent with class unloading may call c2i adapters with dead methods.
+ if (!is_in_static_stub) {
+        // Only the first metadata relocation after a static stub relocation
+        // belongs to that stub and carries the Method* passed to c2i
+        // adapters; skip all other metadata relocations.
+ continue;
+ }
+ is_in_static_stub = false;
+ metadata_Relocation* r = iter.metadata_reloc();
+ Metadata* md = r->metadata_value();
+ if (md != NULL && md->is_method()) {
+ Method* method = static_cast<Method*>(md);
+ if (!method->method_holder()->is_loader_alive()) {
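+            // Clear the stale Method* atomically so that a concurrent reader
+            // observes either the old value or NULL, never a torn pointer.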
+ Atomic::store((Method*)NULL, r->metadata_addr());
+
+ if (!r->metadata_is_immediate()) {
+ r->fix_metadata_relocation();
+ }
+ }
+ }
+ break;
+ }
+
default:
break;
}
--- a/src/hotspot/share/code/compiledMethod.hpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/share/code/compiledMethod.hpp Tue May 14 12:07:24 2019 +0200
@@ -395,8 +395,6 @@
private:
bool static clean_ic_if_metadata_is_dead(CompiledIC *ic);
- void clean_ic_stubs();
-
public:
// GC unloading support
// Cleans unloaded klasses and unloaded nmethods in inline caches
--- a/src/hotspot/share/code/nmethod.cpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/share/code/nmethod.cpp Tue May 14 12:07:24 2019 +0200
@@ -1555,7 +1555,7 @@
// Visit all immediate references that are embedded in the instruction stream.
RelocIterator iter(this, oops_reloc_begin());
while (iter.next()) {
- if (iter.type() == relocInfo::metadata_type ) {
+ if (iter.type() == relocInfo::metadata_type) {
metadata_Relocation* r = iter.metadata_reloc();
// In this metadata, we must only follow those metadatas directly embedded in
// the code. Other metadatas (oop_index>0) are seen as part of
--- a/src/hotspot/share/oops/klass.hpp Tue May 14 12:00:49 2019 +0200
+++ b/src/hotspot/share/oops/klass.hpp Tue May 14 12:07:24 2019 +0200
@@ -333,6 +333,7 @@
static ByteSize secondary_super_cache_offset() { return in_ByteSize(offset_of(Klass, _secondary_super_cache)); }
static ByteSize secondary_supers_offset() { return in_ByteSize(offset_of(Klass, _secondary_supers)); }
static ByteSize java_mirror_offset() { return in_ByteSize(offset_of(Klass, _java_mirror)); }
+ static ByteSize class_loader_data_offset() { return in_ByteSize(offset_of(Klass, _class_loader_data)); }
static ByteSize modifier_flags_offset() { return in_ByteSize(offset_of(Klass, _modifier_flags)); }
static ByteSize layout_helper_offset() { return in_ByteSize(offset_of(Klass, _layout_helper)); }
static ByteSize access_flags_offset() { return in_ByteSize(offset_of(Klass, _access_flags)); }