8213486: SIGSEGV in CompiledMethod::cleanup_inline_caches_impl with AOT
Reviewed-by: kvn, dlong
--- a/src/hotspot/share/aot/aotCompiledMethod.hpp Thu Nov 22 09:44:02 2018 +0100
+++ b/src/hotspot/share/aot/aotCompiledMethod.hpp Thu Nov 22 09:46:24 2018 +0100
@@ -176,6 +176,8 @@
virtual bool is_alive() const { return _is_alive(); }
virtual bool is_in_use() const { return state() == in_use; }
+ virtual bool is_unloading() { return false; }
+
address exception_begin() const { return (address) _code + _meta->exception_handler_offset(); }
virtual const char* name() const { return _name; }
--- a/src/hotspot/share/code/compiledMethod.cpp Thu Nov 22 09:44:02 2018 +0100
+++ b/src/hotspot/share/code/compiledMethod.cpp Thu Nov 22 09:46:24 2018 +0100
@@ -44,11 +44,9 @@
bool caller_must_gc_arguments)
: CodeBlob(name, type, layout, frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
_mark_for_deoptimization_status(not_marked),
- _is_unloading_state(0),
_method(method)
{
init_defaults();
- clear_unloading_state();
}
CompiledMethod::CompiledMethod(Method* method, const char* name, CompilerType type, int size,
@@ -57,11 +55,9 @@
: CodeBlob(name, type, CodeBlobLayout((address) this, size, header_size, cb), cb,
frame_complete_offset, frame_size, oop_maps, caller_must_gc_arguments),
_mark_for_deoptimization_status(not_marked),
- _is_unloading_state(0),
_method(method)
{
init_defaults();
- clear_unloading_state();
}
void CompiledMethod::init_defaults() {
@@ -546,74 +542,6 @@
DEBUG_ONLY(metadata_do(check_class));
}
-// The _is_unloading_state encodes a tuple comprising the unloading cycle
-// and the result of IsUnloadingBehaviour::is_unloading() fpr that cycle.
-// This is the bit layout of the _is_unloading_state byte: 00000CCU
-// CC refers to the cycle, which has 2 bits, and U refers to the result of
-// IsUnloadingBehaviour::is_unloading() for that unloading cycle.
-
-class IsUnloadingState: public AllStatic {
- static const uint8_t _is_unloading_mask = 1;
- static const uint8_t _is_unloading_shift = 0;
- static const uint8_t _unloading_cycle_mask = 6;
- static const uint8_t _unloading_cycle_shift = 1;
-
- static uint8_t set_is_unloading(uint8_t state, bool value) {
- state &= ~_is_unloading_mask;
- if (value) {
- state |= 1 << _is_unloading_shift;
- }
- assert(is_unloading(state) == value, "unexpected unloading cycle overflow");
- return state;
- }
-
- static uint8_t set_unloading_cycle(uint8_t state, uint8_t value) {
- state &= ~_unloading_cycle_mask;
- state |= value << _unloading_cycle_shift;
- assert(unloading_cycle(state) == value, "unexpected unloading cycle overflow");
- return state;
- }
-
-public:
- static bool is_unloading(uint8_t state) { return (state & _is_unloading_mask) >> _is_unloading_shift == 1; }
- static uint8_t unloading_cycle(uint8_t state) { return (state & _unloading_cycle_mask) >> _unloading_cycle_shift; }
-
- static uint8_t create(bool is_unloading, uint8_t unloading_cycle) {
- uint8_t state = 0;
- state = set_is_unloading(state, is_unloading);
- state = set_unloading_cycle(state, unloading_cycle);
- return state;
- }
-};
-
-bool CompiledMethod::is_unloading() {
- uint8_t state = RawAccess<MO_RELAXED>::load(&_is_unloading_state);
- bool state_is_unloading = IsUnloadingState::is_unloading(state);
- uint8_t state_unloading_cycle = IsUnloadingState::unloading_cycle(state);
- if (state_is_unloading) {
- return true;
- }
- if (state_unloading_cycle == CodeCache::unloading_cycle()) {
- return false;
- }
-
- // The IsUnloadingBehaviour is responsible for checking if there are any dead
- // oops in the CompiledMethod, by calling oops_do on it.
- state_unloading_cycle = CodeCache::unloading_cycle();
- state_is_unloading = IsUnloadingBehaviour::current()->is_unloading(this);
-
- state = IsUnloadingState::create(state_is_unloading, state_unloading_cycle);
-
- RawAccess<MO_RELAXED>::store(&_is_unloading_state, state);
-
- return state_is_unloading;
-}
-
-void CompiledMethod::clear_unloading_state() {
- uint8_t state = IsUnloadingState::create(false, CodeCache::unloading_cycle());
- RawAccess<MO_RELAXED>::store(&_is_unloading_state, state);
-}
-
// Called to clean up after class unloading for live nmethods and from the sweeper
// for all methods.
void CompiledMethod::cleanup_inline_caches_impl(bool unloading_occurred, bool clean_all) {
--- a/src/hotspot/share/code/compiledMethod.hpp Thu Nov 22 09:44:02 2018 +0100
+++ b/src/hotspot/share/code/compiledMethod.hpp Thu Nov 22 09:46:24 2018 +0100
@@ -151,8 +151,6 @@
bool _is_far_code; // Code is far from CodeCache.
// Have to use far call instructions to call it from code in CodeCache.
- volatile uint8_t _is_unloading_state; // Local state used to keep track of whether unloading is happening or not
-
// set during construction
unsigned int _has_unsafe_access:1; // May fault due to unsafe access.
unsigned int _has_method_handle_invokes:1; // Has this method MethodHandle invokes?
@@ -390,10 +388,9 @@
// GC unloading support
// Cleans unloaded klasses and unloaded nmethods in inline caches
- bool is_unloading();
+ virtual bool is_unloading() = 0;
void unload_nmethod_caches(bool class_unloading_occurred);
- void clear_unloading_state();
virtual void do_unloading(bool unloading_occurred) { }
private:
--- a/src/hotspot/share/code/nmethod.cpp Thu Nov 22 09:44:02 2018 +0100
+++ b/src/hotspot/share/code/nmethod.cpp Thu Nov 22 09:46:24 2018 +0100
@@ -565,6 +565,7 @@
ByteSize basic_lock_sp_offset,
OopMapSet* oop_maps )
: CompiledMethod(method, "native nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false),
+ _is_unloading_state(0),
_native_receiver_sp_offset(basic_lock_owner_sp_offset),
_native_basic_lock_sp_offset(basic_lock_sp_offset)
{
@@ -609,6 +610,7 @@
code_buffer->copy_code_and_locs_to(this);
code_buffer->copy_values_to(this);
+ clear_unloading_state();
if (ScavengeRootsInCode) {
Universe::heap()->register_nmethod(this);
}
@@ -672,6 +674,7 @@
#endif
)
: CompiledMethod(method, "nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false),
+ _is_unloading_state(0),
_native_receiver_sp_offset(in_ByteSize(-1)),
_native_basic_lock_sp_offset(in_ByteSize(-1))
{
@@ -1505,6 +1508,74 @@
if (_method != NULL) f(_method);
}
+// The _is_unloading_state encodes a tuple comprising the unloading cycle
+// and the result of IsUnloadingBehaviour::is_unloading() for that cycle.
+// This is the bit layout of the _is_unloading_state byte: 00000CCU
+// CC refers to the cycle, which has 2 bits, and U refers to the result of
+// IsUnloadingBehaviour::is_unloading() for that unloading cycle.
+
+class IsUnloadingState: public AllStatic {
+ static const uint8_t _is_unloading_mask = 1;
+ static const uint8_t _is_unloading_shift = 0;
+ static const uint8_t _unloading_cycle_mask = 6;
+ static const uint8_t _unloading_cycle_shift = 1;
+
+ static uint8_t set_is_unloading(uint8_t state, bool value) {
+ state &= ~_is_unloading_mask;
+ if (value) {
+ state |= 1 << _is_unloading_shift;
+ }
+ assert(is_unloading(state) == value, "unexpected unloading cycle overflow");
+ return state;
+ }
+
+ static uint8_t set_unloading_cycle(uint8_t state, uint8_t value) {
+ state &= ~_unloading_cycle_mask;
+ state |= value << _unloading_cycle_shift;
+ assert(unloading_cycle(state) == value, "unexpected unloading cycle overflow");
+ return state;
+ }
+
+public:
+ static bool is_unloading(uint8_t state) { return (state & _is_unloading_mask) >> _is_unloading_shift == 1; }
+ static uint8_t unloading_cycle(uint8_t state) { return (state & _unloading_cycle_mask) >> _unloading_cycle_shift; }
+
+ static uint8_t create(bool is_unloading, uint8_t unloading_cycle) {
+ uint8_t state = 0;
+ state = set_is_unloading(state, is_unloading);
+ state = set_unloading_cycle(state, unloading_cycle);
+ return state;
+ }
+};
+
+bool nmethod::is_unloading() {
+ uint8_t state = RawAccess<MO_RELAXED>::load(&_is_unloading_state);
+ bool state_is_unloading = IsUnloadingState::is_unloading(state);
+ uint8_t state_unloading_cycle = IsUnloadingState::unloading_cycle(state);
+ if (state_is_unloading) {
+ return true;
+ }
+ if (state_unloading_cycle == CodeCache::unloading_cycle()) {
+ return false;
+ }
+
+ // The IsUnloadingBehaviour is responsible for checking if there are any dead
+ // oops in the CompiledMethod, by calling oops_do on it.
+ state_unloading_cycle = CodeCache::unloading_cycle();
+ state_is_unloading = IsUnloadingBehaviour::current()->is_unloading(this);
+
+ state = IsUnloadingState::create(state_is_unloading, state_unloading_cycle);
+
+ RawAccess<MO_RELAXED>::store(&_is_unloading_state, state);
+
+ return state_is_unloading;
+}
+
+void nmethod::clear_unloading_state() {
+ uint8_t state = IsUnloadingState::create(false, CodeCache::unloading_cycle());
+ RawAccess<MO_RELAXED>::store(&_is_unloading_state, state);
+}
+
// This is called at the end of the strong tracing/marking phase of a
// GC to unload an nmethod if it contains otherwise unreachable
--- a/src/hotspot/share/code/nmethod.hpp Thu Nov 22 09:44:02 2018 +0100
+++ b/src/hotspot/share/code/nmethod.hpp Thu Nov 22 09:46:24 2018 +0100
@@ -158,6 +158,9 @@
// counter is decreased (by 1) while sweeping.
int _hotness_counter;
+ // Local state used to keep track of whether unloading is happening or not
+ volatile uint8_t _is_unloading_state;
+
// These are used for compiled synchronized native methods to
// locate the owner and stack slot for the BasicLock so that we can
// properly revoke the bias of the owner if necessary. They are
@@ -323,6 +326,8 @@
bool is_zombie() const { return _state == zombie; }
bool is_unloaded() const { return _state == unloaded; }
+ void clear_unloading_state();
+ virtual bool is_unloading();
virtual void do_unloading(bool unloading_occurred);
#if INCLUDE_RTM_OPT