--- a/src/hotspot/share/gc/cms/concurrentMarkSweepGeneration.cpp Mon Mar 18 15:19:39 2019 +0100
+++ b/src/hotspot/share/gc/cms/concurrentMarkSweepGeneration.cpp Mon Mar 18 15:21:33 2019 +0100
@@ -4248,7 +4248,7 @@
verify_overflow_empty();
if (should_unload_classes()) {
- heap->prune_nmethods();
+ heap->prune_scavengable_nmethods();
}
JvmtiExport::gc_epilogue();
--- a/src/hotspot/share/gc/parallel/parallelScavengeHeap.cpp Mon Mar 18 15:19:39 2019 +0100
+++ b/src/hotspot/share/gc/parallel/parallelScavengeHeap.cpp Mon Mar 18 15:21:33 2019 +0100
@@ -720,7 +720,7 @@
// nothing particular
}
-void ParallelScavengeHeap::prune_nmethods() {
+void ParallelScavengeHeap::prune_scavengable_nmethods() {
ScavengableNMethods::prune_nmethods();
}
--- a/src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp Mon Mar 18 15:19:39 2019 +0100
+++ b/src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp Mon Mar 18 15:21:33 2019 +0100
@@ -162,7 +162,7 @@
virtual void verify_nmethod(nmethod* nm);
virtual void flush_nmethod(nmethod* nm);
- void prune_nmethods();
+ void prune_scavengable_nmethods();
size_t max_capacity() const;
--- a/src/hotspot/share/gc/parallel/psMarkSweep.cpp Mon Mar 18 15:19:39 2019 +0100
+++ b/src/hotspot/share/gc/parallel/psMarkSweep.cpp Mon Mar 18 15:21:33 2019 +0100
@@ -254,7 +254,7 @@
MetaspaceUtils::verify_metrics();
BiasedLocking::restore_marks();
- heap->prune_nmethods();
+ heap->prune_scavengable_nmethods();
JvmtiExport::gc_epilogue();
#if COMPILER2_OR_JVMCI
--- a/src/hotspot/share/gc/parallel/psParallelCompact.cpp Mon Mar 18 15:19:39 2019 +0100
+++ b/src/hotspot/share/gc/parallel/psParallelCompact.cpp Mon Mar 18 15:21:33 2019 +0100
@@ -1061,7 +1061,7 @@
ClassLoaderDataGraph::purge();
MetaspaceUtils::verify_metrics();
- heap->prune_nmethods();
+ heap->prune_scavengable_nmethods();
JvmtiExport::gc_epilogue();
#if COMPILER2_OR_JVMCI
--- a/src/hotspot/share/gc/parallel/psTasks.cpp Mon Mar 18 15:19:39 2019 +0100
+++ b/src/hotspot/share/gc/parallel/psTasks.cpp Mon Mar 18 15:21:33 2019 +0100
@@ -98,8 +98,8 @@
case code_cache:
{
- MarkingCodeBlobClosure each_scavengable_code_blob(&roots_to_old_closure, CodeBlobToOopClosure::FixRelocations);
- ScavengableNMethods::scavengable_nmethods_do(&each_scavengable_code_blob);
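+        // Note: nmethods_do also prunes entries that no longer have
+        // scavengable oops as it walks (see scavengableNMethods.cpp).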
+ MarkingCodeBlobClosure code_closure(&roots_to_old_closure, CodeBlobToOopClosure::FixRelocations);
+ ScavengableNMethods::nmethods_do(&code_closure);
AOTLoader::oops_do(&roots_closure);
}
break;
--- a/src/hotspot/share/gc/serial/genMarkSweep.cpp Mon Mar 18 15:19:39 2019 +0100
+++ b/src/hotspot/share/gc/serial/genMarkSweep.cpp Mon Mar 18 15:21:33 2019 +0100
@@ -124,7 +124,7 @@
rs->invalidate_or_clear(old_gen);
}
- gch->prune_nmethods();
+ gch->prune_scavengable_nmethods();
JvmtiExport::gc_epilogue();
// refs processing: clean slate
--- a/src/hotspot/share/gc/shared/genCollectedHeap.cpp Mon Mar 18 15:19:39 2019 +0100
+++ b/src/hotspot/share/gc/shared/genCollectedHeap.cpp Mon Mar 18 15:21:33 2019 +0100
@@ -726,7 +726,7 @@
// Do nothing.
}
-void GenCollectedHeap::prune_nmethods() {
+void GenCollectedHeap::prune_scavengable_nmethods() {
ScavengableNMethods::prune_nmethods();
}
@@ -871,7 +871,7 @@
assert(code_roots != NULL, "must supply closure for code cache");
// We only visit parts of the CodeCache when scavenging.
- ScavengableNMethods::scavengable_nmethods_do(code_roots);
+ ScavengableNMethods::nmethods_do(code_roots);
}
if (so & SO_AllCodeCache) {
assert(code_roots != NULL, "must supply closure for code cache");
--- a/src/hotspot/share/gc/shared/genCollectedHeap.hpp Mon Mar 18 15:19:39 2019 +0100
+++ b/src/hotspot/share/gc/shared/genCollectedHeap.hpp Mon Mar 18 15:21:33 2019 +0100
@@ -255,7 +255,7 @@
virtual void verify_nmethod(nmethod* nm);
virtual void flush_nmethod(nmethod* nm);
- void prune_nmethods();
+ void prune_scavengable_nmethods();
// Iteration functions.
void oop_iterate(OopIterateClosure* cl);
--- a/src/hotspot/share/gc/shared/scavengableNMethods.cpp Mon Mar 18 15:19:39 2019 +0100
+++ b/src/hotspot/share/gc/shared/scavengableNMethods.cpp Mon Mar 18 15:21:33 2019 +0100
@@ -25,13 +25,10 @@
#include "precompiled.hpp"
#include "code/codeCache.hpp"
#include "code/nmethod.hpp"
-#include "compiler/compileTask.hpp"
-#include "gc/shared/collectedHeap.hpp"
#include "gc/shared/scavengableNMethods.hpp"
#include "gc/shared/scavengableNMethodsData.hpp"
-#include "logging/log.hpp"
-#include "logging/logStream.hpp"
#include "memory/universe.hpp"
+#include "runtime/mutexLocker.hpp"
#include "utilities/debug.hpp"
static ScavengableNMethodsData gc_data(nmethod* nm) {
@@ -60,8 +57,6 @@
data.set_next(_head);
_head = nm;
-
- CodeCache::print_trace("register_nmethod", nm);
}
void ScavengableNMethods::unregister_nmethod(nmethod* nm) {
@@ -71,7 +66,6 @@
nmethod* prev = NULL;
for (nmethod* cur = _head; cur != NULL; cur = gc_data(cur).next()) {
if (cur == nm) {
- CodeCache::print_trace("unregister_nmethod", nm);
unlist_nmethod(cur, prev);
return;
}
@@ -127,37 +121,18 @@
class HasScavengableOops: public OopClosure {
BoolObjectClosure* _is_scavengable;
bool _found;
- nmethod* _print_nm;
public:
HasScavengableOops(BoolObjectClosure* is_scavengable, nmethod* nm) :
_is_scavengable(is_scavengable),
- _found(false),
- _print_nm(nm) {}
+ _found(false) {}
bool found() { return _found; }
virtual void do_oop(oop* p) {
- if (*p != NULL && _is_scavengable->do_object_b(*p)) {
- NOT_PRODUCT(maybe_print(p));
+ if (!_found && *p != NULL && _is_scavengable->do_object_b(*p)) {
_found = true;
}
}
virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
-
-#ifndef PRODUCT
- void maybe_print(oop* p) {
- LogTarget(Trace, gc, nmethod) lt;
- if (lt.is_enabled()) {
- LogStream ls(lt);
- if (!_found) {
- CompileTask::print(&ls, _print_nm, "new scavengable oop", /*short_form:*/ true);
- }
- ls.print("" PTR_FORMAT "[offset=%d] found scavengable oop " PTR_FORMAT " (found at " PTR_FORMAT ") ",
- p2i(_print_nm), (int)((intptr_t)p - (intptr_t)_print_nm),
- p2i(*p), p2i(p));
- ls.cr();
- }
- }
-#endif //PRODUCT
};
bool ScavengableNMethods::has_scavengable_oops(nmethod* nm) {
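The rewritten HasScavengableOops above also gains an early exit: once _found is set, do_oop skips the is_scavengable test for every remaining oop, which the old tracing variant could not do because it printed each hit. A minimal standalone sketch of that pattern (hypothetical types and names, not HotSpot code):

    #include <cstddef>

    struct Oop {};

    // Early-exit predicate closure: after the first hit, later calls are
    // cheap no-ops (illustrative only, mirrors the change above).
    class HasInterestingOops {
      bool (*_is_interesting)(Oop*);  // predicate, e.g. "points into the young generation"
      bool _found;
    public:
      explicit HasInterestingOops(bool (*pred)(Oop*)) : _is_interesting(pred), _found(false) {}
      void do_oop(Oop** p) {
        // Once a hit is recorded, skip the (potentially costly) predicate entirely.
        if (!_found && *p != NULL && _is_interesting(*p)) {
          _found = true;
        }
      }
      bool found() const { return _found; }
    };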
@@ -167,42 +142,32 @@
}
// Walk the list of methods which might contain oops to the java heap.
-void ScavengableNMethods::scavengable_nmethods_do(CodeBlobToOopClosure* f) {
+void ScavengableNMethods::nmethods_do_and_prune(CodeBlobToOopClosure* cl) {
assert_locked_or_safepoint(CodeCache_lock);
- const bool fix_relocations = f->fix_relocations();
debug_only(mark_on_list_nmethods());
nmethod* prev = NULL;
nmethod* cur = _head;
while (cur != NULL) {
+ assert(cur->is_alive(), "Must be");
+
ScavengableNMethodsData data = gc_data(cur);
debug_only(data.clear_marked());
- assert(data.not_marked(), "");
assert(data.on_list(), "else shouldn't be on this list");
- bool is_live = (!cur->is_zombie() && !cur->is_unloaded());
- LogTarget(Trace, gc, nmethod) lt;
- if (lt.is_enabled()) {
- LogStream ls(lt);
- CompileTask::print(&ls, cur,
- is_live ? "scavengable root " : "dead scavengable root", /*short_form:*/ true);
- }
- if (is_live) {
- // Perform cur->oops_do(f), maybe just once per nmethod.
- f->do_code_blob(cur);
+ if (cl != NULL) {
+ cl->do_code_blob(cur);
}
+
nmethod* const next = data.next();
- // The scavengable nmethod list must contain all methods with scavengable
- // oops. It is safe to include more nmethod on the list, but we do not
- // expect any live non-scavengable nmethods on the list.
- if (fix_relocations) {
- if (!is_live || !has_scavengable_oops(cur)) {
- unlist_nmethod(cur, prev);
- } else {
- prev = cur;
- }
+
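+    // Prune: unlist nmethods that no longer hold any scavengable oops.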
+ if (!has_scavengable_oops(cur)) {
+ unlist_nmethod(cur, prev);
+ } else {
+ prev = cur;
}
+
cur = next;
}
@@ -210,15 +175,24 @@
debug_only(verify_unlisted_nmethods(NULL));
}
+void ScavengableNMethods::prune_nmethods() {
+ nmethods_do_and_prune(NULL /* No closure */);
+}
+
+// Walk the list of methods which might contain oops to the Java heap.
+// As a side effect, nmethods that no longer have scavengable oops are
+// pruned from the list.
+void ScavengableNMethods::nmethods_do(CodeBlobToOopClosure* cl) {
+ nmethods_do_and_prune(cl);
+}
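Both public entry points now funnel into the same list walk: prune_nmethods() passes a NULL closure so only the pruning side effect remains, while nmethods_do(cl) applies cl and prunes in a single pass. A caller-side sketch (the two wrapper functions are illustrative, not part of this patch; the ScavengableNMethods calls are the real API introduced above):

    #include "gc/shared/scavengableNMethods.hpp"
    #include "memory/iterator.hpp"  // CodeBlobToOopClosure

    // Illustrative: a young-GC root scan visits and prunes in one pass.
    static void example_young_gc_code_roots(CodeBlobToOopClosure* cl) {
      ScavengableNMethods::nmethods_do(cl);
    }

    // Illustrative: a full-GC epilogue only wants the pruning side effect.
    static void example_full_gc_epilogue() {
      ScavengableNMethods::prune_nmethods();
    }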
+
#ifndef PRODUCT
-void ScavengableNMethods::asserted_non_scavengable_nmethods_do(CodeBlobClosure* f) {
+void ScavengableNMethods::asserted_non_scavengable_nmethods_do(CodeBlobClosure* cl) {
// While we are here, verify the integrity of the list.
mark_on_list_nmethods();
for (nmethod* cur = _head; cur != NULL; cur = gc_data(cur).next()) {
assert(gc_data(cur).on_list(), "else shouldn't be on this list");
gc_data(cur).clear_marked();
}
- verify_unlisted_nmethods(f);
+ verify_unlisted_nmethods(cl);
}
#endif // PRODUCT
@@ -228,8 +202,6 @@
assert((prev == NULL && _head == nm) ||
(prev != NULL && gc_data(prev).next() == nm), "precondition");
- CodeCache::print_trace("unlist_nmethod", nm);
-
ScavengableNMethodsData data = gc_data(nm);
if (prev == NULL) {
@@ -241,33 +213,6 @@
data.clear_on_list();
}
-void ScavengableNMethods::prune_nmethods() {
- assert_locked_or_safepoint(CodeCache_lock);
-
- debug_only(mark_on_list_nmethods());
-
- nmethod* last = NULL;
- nmethod* cur = _head;
- while (cur != NULL) {
- nmethod* next = gc_data(cur).next();
- debug_only(gc_data(cur).clear_marked());
- assert(gc_data(cur).on_list(), "else shouldn't be on this list");
-
- if (!cur->is_zombie() && !cur->is_unloaded() && has_scavengable_oops(cur)) {
- // Keep it. Advance 'last' to prevent deletion.
- last = cur;
- } else {
- // Prune it from the list, so we don't have to look at it any more.
- CodeCache::print_trace("prune_nmethods", cur);
- unlist_nmethod(cur, last);
- }
- cur = next;
- }
-
- // Check for stray marks.
- debug_only(verify_unlisted_nmethods(NULL));
-}
-
#ifndef PRODUCT
// Temporarily mark nmethods that are claimed to be on the scavenge list.
void ScavengableNMethods::mark_on_list_nmethods() {
@@ -283,15 +228,15 @@
// If the closure is given, run it on the unlisted nmethods.
// Also make sure that the effects of mark_on_list_nmethods is gone.
-void ScavengableNMethods::verify_unlisted_nmethods(CodeBlobClosure* f_or_null) {
+void ScavengableNMethods::verify_unlisted_nmethods(CodeBlobClosure* cl) {
NMethodIterator iter(NMethodIterator::only_alive);
while(iter.next()) {
nmethod* nm = iter.method();
verify_nmethod(nm);
- if (f_or_null != NULL && !gc_data(nm).on_list()) {
- f_or_null->do_code_blob(nm);
+ if (cl != NULL && !gc_data(nm).on_list()) {
+ cl->do_code_blob(nm);
}
}
}
--- a/src/hotspot/share/gc/shared/scavengableNMethods.hpp Mon Mar 18 15:19:39 2019 +0100
+++ b/src/hotspot/share/gc/shared/scavengableNMethods.hpp Mon Mar 18 15:21:33 2019 +0100
@@ -46,24 +46,23 @@
static void unregister_nmethod(nmethod* nm);
static void verify_nmethod(nmethod* nm);
+ // Remove nmethods that no longer have scavengable oops.
static void prune_nmethods();
- // Apply f to every live code blob in scavengable nmethods. Prune nmethods
- // from the list of scavengable nmethods if f->fix_relocations() and a nmethod
- // no longer has scavengable oops. If f->fix_relocations(), then f must copy
- // objects to their new location immediately to avoid fixing nmethods on the
- // basis of the old object locations.
- static void scavengable_nmethods_do(CodeBlobToOopClosure* f);
+ // Apply closure to every scavengable nmethod.
+ // Remove nmethods that no longer have scavengable oops.
+ static void nmethods_do(CodeBlobToOopClosure* cl);
- static void asserted_non_scavengable_nmethods_do(CodeBlobClosure* f = NULL) PRODUCT_RETURN;
+ static void asserted_non_scavengable_nmethods_do(CodeBlobClosure* cl) PRODUCT_RETURN;
private:
+ static void nmethods_do_and_prune(CodeBlobToOopClosure* cl);
static void unlist_nmethod(nmethod* nm, nmethod* prev);
static bool has_scavengable_oops(nmethod* nm);
static void mark_on_list_nmethods() PRODUCT_RETURN;
- static void verify_unlisted_nmethods(CodeBlobClosure* f_or_null) PRODUCT_RETURN;
+ static void verify_unlisted_nmethods(CodeBlobClosure* cl) PRODUCT_RETURN;
};
#endif // SHARE_GC_SHARED_SCAVENGABLENMETHODS_HPP
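Across the heap classes the change reduces to a renamed thin forwarder; a minimal sketch of the resulting shape (the class below is illustrative and elides the rest of the CollectedHeap interface; the real implementations are the ParallelScavengeHeap and GenCollectedHeap hunks above):

    #include "gc/shared/scavengableNMethods.hpp"

    class ExampleGenerationalHeap /* : public CollectedHeap */ {
    public:
      // Renamed from prune_nmethods(): this prunes the scavengable-nmethod
      // list; it does not unload or free any nmethods.
      void prune_scavengable_nmethods() {
        ScavengableNMethods::prune_nmethods();
      }
    };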