8208677: Move inner metaspace cleaning out of class unloading
Summary: Move metaspace cleaning (deallocate lists and previous klass versions) into safepoint cleanup actions that run only when needed.
Reviewed-by: eosterlund, hseigel
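
The pattern this change introduces is: code that queues metadata for deferred deletion sets a "should clean" flag, and the safepoint cleanup phase checks (and resets) that flag so the metadata walk only runs when there is actually work to do, instead of being tied to class unloading. The following is a minimal standalone sketch of that flag-gated deferred-cleanup pattern; it is not HotSpot code, and all names (Metadata, add_to_deallocate_list, safepoint_cleanup) are hypothetical stand-ins.

#include <cstdio>
#include <vector>

// Hypothetical stand-in for a piece of metadata queued for deferred deletion.
struct Metadata { int id; };

static std::vector<Metadata*> g_deallocate_list;
static bool g_should_clean = false;   // analogous to _should_clean_deallocate_lists

// Producer side: queue the metadata and note that cleanup is now needed.
static void add_to_deallocate_list(Metadata* m) {
  g_deallocate_list.push_back(m);
  g_should_clean = true;
}

// Consumer side: called from a periodic cleanup phase (the safepoint in HotSpot).
// Returns immediately when nothing was queued, so the common case stays cheap.
static void safepoint_cleanup() {
  if (!g_should_clean) return;        // nothing to do this time
  g_should_clean = false;             // assume everything gets cleaned
  // Walk backwards because entries are removed while iterating.
  for (int i = (int)g_deallocate_list.size() - 1; i >= 0; i--) {
    Metadata* m = g_deallocate_list[i];
    g_deallocate_list.erase(g_deallocate_list.begin() + i);
    delete m;
  }
  printf("cleaned deallocate list\n");
}

int main() {
  safepoint_cleanup();                // no-op: flag not set
  add_to_deallocate_list(new Metadata{1});
  add_to_deallocate_list(new Metadata{2});
  safepoint_cleanup();                // performs the deferred cleanup
  return 0;
}

In the actual patch below, the producer side is ClassLoaderData::add_to_deallocate_list() setting _should_clean_deallocate_lists, and the consumer side is the VMThread calling ClassLoaderDataGraph::walk_metadata_and_clean_metaspaces() from safepoint cleanup, guarded by should_clean_metaspaces_and_reset().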
--- a/src/hotspot/share/classfile/classLoaderData.cpp Wed Aug 08 15:11:11 2018 -0400
+++ b/src/hotspot/share/classfile/classLoaderData.cpp Wed Aug 08 15:24:21 2018 -0400
@@ -95,7 +95,7 @@
ClassLoaderDataGraph::_head = _the_null_class_loader_data;
assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
- LogTarget(Debug, class, loader, data) lt;
+ LogTarget(Trace, class, loader, data) lt;
if (lt.is_enabled()) {
ResourceMark rm;
LogStream ls(lt);
@@ -595,7 +595,7 @@
void ClassLoaderData::unload() {
_unloading = true;
- LogTarget(Debug, class, loader, data) lt;
+ LogTarget(Trace, class, loader, data) lt;
if (lt.is_enabled()) {
ResourceMark rm;
LogStream ls(lt);
@@ -606,7 +606,7 @@
// Some items on the _deallocate_list need to free their C heap structures
// if they are not already on the _klasses list.
- unload_deallocate_list();
+ free_deallocate_list_C_heap_structures();
// Tell serviceability tools these classes are unloading
// after erroneous classes are released.
@@ -849,7 +849,7 @@
}
// Add this metadata pointer to be freed when it's safe. This is only during
-// class unloading because Handles might point to this metadata field.
+// a safepoint which checks if handles point to this metadata field.
void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
// Metadata in shared region isn't deleted.
if (!m->is_shared()) {
@@ -858,6 +858,8 @@
_deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
}
_deallocate_list->append_if_missing(m);
+ log_debug(class, loader, data)("deallocate added for %s", m->print_value_string());
+ ClassLoaderDataGraph::set_should_clean_deallocate_lists();
}
}
@@ -891,8 +893,44 @@
assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
"scratch classes on this list should be dead");
// Also should assert that other metadata on the list was found in handles.
+ // Some cleaning remains.
+ ClassLoaderDataGraph::set_should_clean_deallocate_lists();
+ }
+ }
+}
+
+void ClassLoaderDataGraph::clean_deallocate_lists(bool walk_previous_versions) {
+ uint loaders_processed = 0;
+ for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
+ // is_alive check will be necessary for concurrent class unloading.
+ if (cld->is_alive()) {
+ // clean metaspace
+ if (walk_previous_versions) {
+ cld->classes_do(InstanceKlass::purge_previous_versions);
+ }
+ cld->free_deallocate_list();
+ loaders_processed++;
}
}
+ log_debug(class, loader, data)("clean_deallocate_lists: loaders processed %u %s",
+ loaders_processed, walk_previous_versions ? "walk_previous_versions" : "");
+}
+
+void ClassLoaderDataGraph::walk_metadata_and_clean_metaspaces() {
+ assert(SafepointSynchronize::is_at_safepoint(), "must only be called at safepoint");
+
+ _should_clean_deallocate_lists = false; // assume everything gets cleaned
+
+ // Mark metadata seen on the stack so we can delete unreferenced entries.
+ // Walk all metadata, including the expensive code cache walk, only for class redefinition.
+ // The MetadataOnStackMark walk during redefinition saves previous versions if it finds old methods
+ // on the stack or in the code cache, so we only have to repeat the full walk if
+ // they were found at that time.
+ // TODO: have redefinition clean old methods out of the code cache. They still exist in some places.
+ bool walk_all_metadata = InstanceKlass::has_previous_versions_and_reset();
+
+ MetadataOnStackMark md_on_stack(walk_all_metadata);
+ clean_deallocate_lists(walk_all_metadata);
}
// This is distinct from free_deallocate_list. For class loader data that are
@@ -900,7 +938,7 @@
// scratch or error classes so that unloading events aren't triggered for these
// classes. The metadata is removed with the unloading metaspace.
// There isn't C heap memory allocated for methods, so nothing is done for them.
-void ClassLoaderData::unload_deallocate_list() {
+void ClassLoaderData::free_deallocate_list_C_heap_structures() {
// Don't need lock, at safepoint
assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
assert(is_unloading(), "only called for ClassLoaderData that are unloading");
@@ -910,7 +948,6 @@
// Go backwards because this removes entries that are freed.
for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
Metadata* m = _deallocate_list->at(i);
- assert (!m->on_stack(), "wouldn't be unloading if this were so");
_deallocate_list->remove_at(i);
if (m->is_constantPool()) {
((ConstantPool*)m)->release_C_heap_structures();
@@ -1026,6 +1063,8 @@
ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;
bool ClassLoaderDataGraph::_should_purge = false;
+bool ClassLoaderDataGraph::_should_clean_deallocate_lists = false;
+bool ClassLoaderDataGraph::_safepoint_cleanup_needed = false;
bool ClassLoaderDataGraph::_metaspace_oom = false;
// Add a new class loader data node to the list. Assign the newly created
@@ -1056,7 +1095,7 @@
cld->set_next(next);
ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
if (exchanged == next) {
- LogTarget(Debug, class, loader, data) lt;
+ LogTarget(Trace, class, loader, data) lt;
if (lt.is_enabled()) {
ResourceMark rm;
LogStream ls(lt);
@@ -1337,7 +1376,10 @@
// Move class loader data from main list to the unloaded list for unloading
// and deallocation later.
-bool ClassLoaderDataGraph::do_unloading(bool clean_previous_versions) {
+bool ClassLoaderDataGraph::do_unloading(bool do_cleaning) {
+
+ // Indicate whether safepoint cleanup is needed.
+ _safepoint_cleanup_needed |= do_cleaning;
ClassLoaderData* data = _head;
ClassLoaderData* prev = NULL;
@@ -1345,15 +1387,6 @@
uint loaders_processed = 0;
uint loaders_removed = 0;
- // Mark metadata seen on the stack only so we can delete unneeded entries.
- // Only walk all metadata, including the expensive code cache walk, for Full GC
- // and only if class redefinition and if there's previous versions of
- // Klasses to delete.
- bool walk_all_metadata = clean_previous_versions &&
- JvmtiExport::has_redefined_a_class() &&
- InstanceKlass::has_previous_versions_and_reset();
- MetadataOnStackMark md_on_stack(walk_all_metadata);
-
// Save previous _unloading pointer for CMS which may add to unloading list before
// purging and we don't want to rewalk the previously unloaded class loader data.
_saved_unloading = _unloading;
@@ -1361,11 +1394,6 @@
data = _head;
while (data != NULL) {
if (data->is_alive()) {
- // clean metaspace
- if (walk_all_metadata) {
- data->classes_do(InstanceKlass::purge_previous_versions);
- }
- data->free_deallocate_list();
prev = data;
data = data->next();
loaders_processed++;
--- a/src/hotspot/share/classfile/classLoaderData.hpp Wed Aug 08 15:11:11 2018 -0400
+++ b/src/hotspot/share/classfile/classLoaderData.hpp Wed Aug 08 15:24:21 2018 -0400
@@ -79,6 +79,12 @@
static ClassLoaderData* _saved_head;
static ClassLoaderData* _saved_unloading;
static bool _should_purge;
+
+ // Set if there's anything to purge in the deallocate lists or previous versions
+ // during a safepoint after class unloading in a full GC.
+ static bool _should_clean_deallocate_lists;
+ static bool _safepoint_cleanup_needed;
+
// OOM has been seen in metaspace allocation. Used to prevent some
// allocations until class unloading
static bool _metaspace_oom;
@@ -88,6 +94,7 @@
static ClassLoaderData* add_to_graph(Handle class_loader, bool anonymous);
static ClassLoaderData* add(Handle class_loader, bool anonymous);
+
public:
static ClassLoaderData* find_or_create(Handle class_loader);
static void purge();
@@ -116,7 +123,13 @@
static void packages_unloading_do(void f(PackageEntry*));
static void loaded_classes_do(KlassClosure* klass_closure);
static void classes_unloading_do(void f(Klass* const));
- static bool do_unloading(bool clean_previous_versions);
+ static bool do_unloading(bool do_cleaning);
+
+ // Expose state to avoid logging overhead in safepoint cleanup tasks.
+ static inline bool should_clean_metaspaces_and_reset();
+ static void set_should_clean_deallocate_lists() { _should_clean_deallocate_lists = true; }
+ static void clean_deallocate_lists(bool purge_previous_versions);
+ static void walk_metadata_and_clean_metaspaces();
// dictionary do
// Iterate over all klasses in dictionary, but
@@ -297,8 +310,8 @@
void packages_do(void f(PackageEntry*));
// Deallocate free list during class unloading.
- void free_deallocate_list(); // for the classes that are not unloaded
- void unload_deallocate_list(); // for the classes that are unloaded
+ void free_deallocate_list(); // for the classes that are not unloaded
+ void free_deallocate_list_C_heap_structures(); // for the classes that are unloaded
// Allocate out of this class loader data
MetaWord* allocate(size_t size);
--- a/src/hotspot/share/classfile/classLoaderData.inline.hpp Wed Aug 08 15:11:11 2018 -0400
+++ b/src/hotspot/share/classfile/classLoaderData.inline.hpp Wed Aug 08 15:24:21 2018 -0400
@@ -27,6 +27,7 @@
#include "classfile/classLoaderData.hpp"
#include "classfile/javaClasses.hpp"
+#include "oops/instanceKlass.hpp"
#include "oops/oop.inline.hpp"
#include "oops/oopHandle.inline.hpp"
#include "oops/weakHandle.inline.hpp"
@@ -92,4 +93,10 @@
Atomic::sub(count, &_num_array_classes);
}
+bool ClassLoaderDataGraph::should_clean_metaspaces_and_reset() {
+ bool do_cleaning = _safepoint_cleanup_needed;
+ _safepoint_cleanup_needed = false; // reset
+ return (do_cleaning && _should_clean_deallocate_lists) || InstanceKlass::has_previous_versions();
+}
+
#endif // SHARE_VM_CLASSFILE_CLASSLOADERDATA_INLINE_HPP
--- a/src/hotspot/share/code/nmethod.cpp Wed Aug 08 15:11:11 2018 -0400
+++ b/src/hotspot/share/code/nmethod.cpp Wed Aug 08 15:24:21 2018 -0400
@@ -1540,6 +1540,7 @@
}
} else if (iter.type() == relocInfo::virtual_call_type) {
// Check compiledIC holders associated with this nmethod
+ ResourceMark rm;
CompiledIC *ic = CompiledIC_at(&iter);
if (ic->is_icholder_call()) {
CompiledICHolder* cichk = ic->cached_icholder();
--- a/src/hotspot/share/oops/constantPool.cpp Wed Aug 08 15:11:11 2018 -0400
+++ b/src/hotspot/share/oops/constantPool.cpp Wed Aug 08 15:24:21 2018 -0400
@@ -2490,9 +2490,9 @@
if (has_preresolution()) st->print("/preresolution");
if (operands() != NULL) st->print("/operands[%d]", operands()->length());
print_address_on(st);
- st->print(" for ");
- pool_holder()->print_value_on(st);
if (pool_holder() != NULL) {
+ st->print(" for ");
+ pool_holder()->print_value_on(st);
bool extra = (pool_holder()->constants() != this);
if (extra) st->print(" (extra)");
}
--- a/src/hotspot/share/oops/instanceKlass.cpp Wed Aug 08 15:11:11 2018 -0400
+++ b/src/hotspot/share/oops/instanceKlass.cpp Wed Aug 08 15:24:21 2018 -0400
@@ -3665,7 +3665,7 @@
// unloading only. Also resets the flag to false. purge_previous_version
// will set the flag to true if there are any left, i.e., if there's any
// work to do for next time. This is to avoid the expensive code cache
-// walk in CLDG::do_unloading().
+// walk in CLDG::clean_deallocate_lists().
bool InstanceKlass::has_previous_versions_and_reset() {
bool ret = _has_previous_versions;
log_trace(redefine, class, iklass, purge)("Class unloading: has_previous_versions = %s",
--- a/src/hotspot/share/oops/instanceKlass.hpp Wed Aug 08 15:11:11 2018 -0400
+++ b/src/hotspot/share/oops/instanceKlass.hpp Wed Aug 08 15:24:21 2018 -0400
@@ -839,6 +839,7 @@
}
static bool has_previous_versions_and_reset();
+ static bool has_previous_versions() { return _has_previous_versions; }
// JVMTI: Support for caching a class file before it is modified by an agent that can do retransformation
void set_cached_class_file(JvmtiCachedClassFileData *data) {
--- a/src/hotspot/share/prims/jvmtiRedefineClasses.cpp Wed Aug 08 15:11:11 2018 -0400
+++ b/src/hotspot/share/prims/jvmtiRedefineClasses.cpp Wed Aug 08 15:24:21 2018 -0400
@@ -237,6 +237,9 @@
#ifdef PRODUCT
}
#endif
+
+ // Clean up any metadata now unreferenced while MetadataOnStackMark is set.
+ ClassLoaderDataGraph::clean_deallocate_lists(false);
}
void VM_RedefineClasses::doit_epilogue() {
--- a/src/hotspot/share/runtime/safepoint.cpp Wed Aug 08 15:11:11 2018 -0400
+++ b/src/hotspot/share/runtime/safepoint.cpp Wed Aug 08 15:24:21 2018 -0400
@@ -23,7 +23,7 @@
*/
#include "precompiled.hpp"
-#include "classfile/classLoaderData.hpp"
+#include "classfile/classLoaderData.inline.hpp"
#include "classfile/stringTable.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
@@ -719,6 +719,7 @@
post_safepoint_cleanup_task_event(&event, name);
}
}
+
_subtasks.all_tasks_completed(_num_workers);
}
};
@@ -748,8 +749,18 @@
cleanup.work(0);
}
+ // Needs to be done single threaded by the VMThread. This walks
+ // the thread stacks looking for references to metadata before
+ // deciding to remove it from the metaspaces.
+ if (ClassLoaderDataGraph::should_clean_metaspaces_and_reset()) {
+ const char* name = "cleanup live ClassLoaderData metaspaces";
+ TraceTime timer(name, TRACETIME_LOG(Info, safepoint, cleanup));
+ ClassLoaderDataGraph::walk_metadata_and_clean_metaspaces();
+ }
+
// Finish monitor deflation.
ObjectSynchronizer::finish_deflate_idle_monitors(&deflate_counters);
+
}
--- a/test/hotspot/jtreg/runtime/RedefineTests/RedefineRunningMethods.java Wed Aug 08 15:11:11 2018 -0400
+++ b/test/hotspot/jtreg/runtime/RedefineTests/RedefineRunningMethods.java Wed Aug 08 15:24:21 2018 -0400
@@ -31,9 +31,8 @@
* java.instrument
* jdk.jartool/sun.tools.jar
* @run main RedefineClassHelper
- * @run main/othervm/timeout=180 -javaagent:redefineagent.jar -Xlog:redefine+class+iklass+add=trace,redefine+class+iklass+purge=trace,all=trace:file=all.log RedefineRunningMethods
+ * @run main/othervm/timeout=180 -javaagent:redefineagent.jar -Xlog:redefine+class+iklass+add=trace,redefine+class+iklass+purge=trace,class+loader+data=debug,safepoint+cleanup,gc+phases=debug:rt.log RedefineRunningMethods
*/
-// Test is executed with full trace logging redirected to a file to ensure there is no crash during logging anonymous classes - see JDK-8197901
// package access top-level class to avoid problem with RedefineClassHelper