--- a/hotspot/src/share/vm/code/nmethod.cpp Tue Sep 15 11:09:34 2009 -0700
+++ b/hotspot/src/share/vm/code/nmethod.cpp Tue Sep 15 21:53:47 2009 -0700
@@ -581,10 +581,13 @@
debug_only(No_Safepoint_Verifier nsv;)
assert_locked_or_safepoint(CodeCache_lock);
- NOT_PRODUCT(_has_debug_info = false; )
+ NOT_PRODUCT(_has_debug_info = false);
+ _oops_do_mark_link = NULL;
_method = method;
_entry_bci = InvocationEntryBci;
- _link = NULL;
+ _osr_link = NULL;
+ _scavenge_root_link = NULL;
+ _scavenge_root_state = 0;
_compiler = NULL;
// We have no exception handler or deopt handler make the
// values something that will never match a pc like the nmethod vtable entry
@@ -618,7 +621,7 @@
_stack_traversal_mark = 0;
code_buffer->copy_oops_to(this);
- debug_only(check_store();)
+ debug_only(verify_scavenge_root_oops());
CodeCache::commit(this);
VTune::create_nmethod(this);
}
@@ -668,10 +671,13 @@
debug_only(No_Safepoint_Verifier nsv;)
assert_locked_or_safepoint(CodeCache_lock);
- NOT_PRODUCT(_has_debug_info = false; )
+ NOT_PRODUCT(_has_debug_info = false);
+ _oops_do_mark_link = NULL;
_method = method;
_entry_bci = InvocationEntryBci;
- _link = NULL;
+ _osr_link = NULL;
+ _scavenge_root_link = NULL;
+ _scavenge_root_state = 0;
_compiler = NULL;
// We have no exception handler or deopt handler make the
// values something that will never match a pc like the nmethod vtable entry
@@ -703,7 +709,7 @@
_stack_traversal_mark = 0;
code_buffer->copy_oops_to(this);
- debug_only(check_store();)
+ debug_only(verify_scavenge_root_oops());
CodeCache::commit(this);
VTune::create_nmethod(this);
}
@@ -770,12 +776,15 @@
debug_only(No_Safepoint_Verifier nsv;)
assert_locked_or_safepoint(CodeCache_lock);
- NOT_PRODUCT(_has_debug_info = false; )
+ NOT_PRODUCT(_has_debug_info = false);
+ _oops_do_mark_link = NULL;
_method = method;
_compile_id = compile_id;
_comp_level = comp_level;
_entry_bci = entry_bci;
- _link = NULL;
+ _osr_link = NULL;
+ _scavenge_root_link = NULL;
+ _scavenge_root_state = 0;
_compiler = compiler;
_orig_pc_offset = orig_pc_offset;
#ifdef HAVE_DTRACE_H
@@ -813,7 +822,10 @@
code_buffer->copy_oops_to(this);
debug_info->copy_to(this);
dependencies->copy_to(this);
- debug_only(check_store();)
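+ // A minimal descriptive note (not in the original patch): if this nmethod
+ // embeds any scavengable oops, register it on the code cache's
+ // scavenge-root list via CodeCache::add_scavenge_root_nmethod.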
+ if (ScavengeRootsInCode && detect_scavenge_root_oops()) {
+ CodeCache::add_scavenge_root_nmethod(this);
+ }
+ debug_only(verify_scavenge_root_oops());
CodeCache::commit(this);
@@ -902,23 +914,30 @@
if (st != NULL) {
ttyLocker ttyl;
// Print a little tag line that looks like +PrintCompilation output:
- st->print("%3d%c %s",
+ int tlen = (int) strlen(title);
+ bool do_nl = false;
+ if (tlen > 0 && title[tlen-1] == '\n') { tlen--; do_nl = true; }
+ st->print("%3d%c %.*s",
compile_id(),
is_osr_method() ? '%' :
method() != NULL &&
is_native_method() ? 'n' : ' ',
- title);
+ tlen, title);
#ifdef TIERED
st->print(" (%d) ", comp_level());
#endif // TIERED
if (WizardMode) st->print(" (" INTPTR_FORMAT ")", this);
- if (method() != NULL) {
- method()->print_short_name(st);
+ if (Universe::heap()->is_gc_active() && method() != NULL) {
+ st->print("(method)");
+ } else if (method() != NULL) {
+ method()->print_short_name(st);
if (is_osr_method())
st->print(" @ %d", osr_entry_bci());
if (method()->code_size() > 0)
st->print(" (%d bytes)", method()->code_size());
}
+
+ if (do_nl) st->cr();
}
}
@@ -1033,6 +1052,7 @@
}
}
+// This is a private interface with the sweeper.
void nmethod::mark_as_seen_on_stack() {
assert(is_not_entrant(), "must be a non-entrant method");
set_stack_traversal_mark(NMethodSweeper::traversal_count());
@@ -1077,7 +1097,8 @@
" unloadable], methodOop(" INTPTR_FORMAT
"), cause(" INTPTR_FORMAT ")",
this, (address)_method, (address)cause);
- cause->klass()->print();
+ if (!Universe::heap()->is_gc_active())
+ cause->klass()->print();
}
// If _method is already NULL the methodOop is about to be unloaded,
// so we don't have to break the cycle. Note that it is possible to
@@ -1105,7 +1126,8 @@
// The methodOop is gone at this point
assert(_method == NULL, "Tautology");
- set_link(NULL);
+ set_osr_link(NULL);
+ //set_scavenge_root_link(NULL); // done by prune_scavenge_root_nmethods
NMethodSweeper::notify(this);
}
@@ -1291,6 +1313,10 @@
ec = next;
}
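+ // This nmethod is being flushed; take it off the scavenge-root list first.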
+ if (on_scavenge_root_list()) {
+ CodeCache::drop_scavenge_root_nmethod(this);
+ }
+
((CodeBlob*)(this))->flush();
CodeCache::free(this);
@@ -1350,7 +1376,10 @@
return false;
}
}
- assert(unloading_occurred, "Inconsistency in unloading");
+ // If ScavengeRootsInCode is true, an nmethod might be unloaded
+ // simply because one of its constant oops has gone dead.
+ // No actual classes need to be unloaded in order for this to occur.
+ assert(unloading_occurred || ScavengeRootsInCode, "Inconsistency in unloading");
make_unloaded(is_alive, obj);
return true;
}
@@ -1558,12 +1587,108 @@
}
// Scopes
+ // This includes oop constants not inlined in the code stream.
for (oop* p = oops_begin(); p < oops_end(); p++) {
if (*p == Universe::non_oop_word()) continue; // skip non-oops
f->do_oop(p);
}
}
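+// NMETHOD_SENTINEL is a non-NULL value that is never a real nmethod pointer.
+// It marks a claimed _oops_do_mark_link and terminates the _oops_do_mark_nmethods list.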
+#define NMETHOD_SENTINEL ((nmethod*)badAddress)
+
+nmethod* volatile nmethod::_oops_do_mark_nmethods;
+
+// An nmethod is "marked" if its _oops_do_mark_link is set non-null.
+// Even if it is the end of the linked list, it will have a non-null link value,
+// as long as it is on the list.
+// This code must be MP safe, because it is used from parallel GC passes.
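+// Claiming is a two-step protocol: first CAS NMETHOD_SENTINEL into
+// _oops_do_mark_link, then CAS the claimed nmethod onto the head of
+// _oops_do_mark_nmethods.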
+bool nmethod::test_set_oops_do_mark() {
+ assert(nmethod::oops_do_marking_is_active(), "oops_do_marking_prologue must be called");
+ nmethod* observed_mark_link = _oops_do_mark_link;
+ if (observed_mark_link == NULL) {
+ // Claim this nmethod for this thread to mark.
+ observed_mark_link = (nmethod*)
+ Atomic::cmpxchg_ptr(NMETHOD_SENTINEL, &_oops_do_mark_link, NULL);
+ if (observed_mark_link == NULL) {
+
+ // Atomically append this nmethod (now claimed) to the head of the list:
+ nmethod* observed_mark_nmethods = _oops_do_mark_nmethods;
+ for (;;) {
+ nmethod* required_mark_nmethods = observed_mark_nmethods;
+ _oops_do_mark_link = required_mark_nmethods;
+ observed_mark_nmethods = (nmethod*)
+ Atomic::cmpxchg_ptr(this, &_oops_do_mark_nmethods, required_mark_nmethods);
+ if (observed_mark_nmethods == required_mark_nmethods)
+ break;
+ }
+ // Mark was clear when we first saw this nmethod.
+ NOT_PRODUCT(if (TraceScavenge) print_on(tty, "oops_do, mark\n"));
+ return false;
+ }
+ }
+ // On fall through, another racing thread marked this nmethod before we did.
+ return true;
+}
+
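+// Runs once, before any marking threads are forked; installs the sentinel
+// as the initial, empty list head.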
+void nmethod::oops_do_marking_prologue() {
+ NOT_PRODUCT(if (TraceScavenge) tty->print_cr("[oops_do_marking_prologue"));
+ assert(_oops_do_mark_nmethods == NULL, "must not call oops_do_marking_prologue twice in a row");
+ // We use cmpxchg_ptr instead of a plain assignment because the caller may
+ // fork worker threads afterwards, and they all need to see the same state.
+ void* observed = Atomic::cmpxchg_ptr(NMETHOD_SENTINEL, &_oops_do_mark_nmethods, NULL);
+ guarantee(observed == NULL, "no races in this sequential code");
+}
+
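+// Runs once, after all marking work is complete; unlinks each claimed nmethod,
+// clears its _oops_do_mark_link, and resets the global list head to NULL.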
+void nmethod::oops_do_marking_epilogue() {
+ assert(_oops_do_mark_nmethods != NULL, "must not call oops_do_marking_epilogue twice in a row");
+ nmethod* cur = _oops_do_mark_nmethods;
+ while (cur != NMETHOD_SENTINEL) {
+ assert(cur != NULL, "not NULL-terminated");
+ nmethod* next = cur->_oops_do_mark_link;
+ cur->_oops_do_mark_link = NULL;
+ NOT_PRODUCT(if (TraceScavenge) cur->print_on(tty, "oops_do, unmark\n"));
+ cur = next;
+ }
+ void* required = _oops_do_mark_nmethods;
+ void* observed = Atomic::cmpxchg_ptr(NULL, &_oops_do_mark_nmethods, required);
+ guarantee(observed == required, "no races in this sequential code");
+ NOT_PRODUCT(if (TraceScavenge) tty->print_cr("oops_do_marking_epilogue]"));
+}
+
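+// Closure used by detect_scavenge_root_oops() below: sets a flag if any oop
+// embedded in the nmethod is scavengable (a "non-perm" oop in the trace output).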
+class DetectScavengeRoot: public OopClosure {
+ bool _detected_scavenge_root;
+public:
+ DetectScavengeRoot() : _detected_scavenge_root(false)
+ { NOT_PRODUCT(_print_nm = NULL); }
+ bool detected_scavenge_root() { return _detected_scavenge_root; }
+ virtual void do_oop(oop* p) {
+ if ((*p) != NULL && (*p)->is_scavengable()) {
+ NOT_PRODUCT(maybe_print(p));
+ _detected_scavenge_root = true;
+ }
+ }
+ virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
+
+#ifndef PRODUCT
+ nmethod* _print_nm;
+ void maybe_print(oop* p) {
+ if (_print_nm == NULL) return;
+ if (!_detected_scavenge_root) _print_nm->print_on(tty, "new scavenge root");
+ tty->print_cr(""PTR_FORMAT"[offset=%d] detected non-perm oop "PTR_FORMAT" (found at "PTR_FORMAT")",
+ _print_nm, (int)((intptr_t)p - (intptr_t)_print_nm),
+ (intptr_t)(*p), (intptr_t)p);
+ (*p)->print();
+ }
+#endif //PRODUCT
+};
+
+bool nmethod::detect_scavenge_root_oops() {
+ DetectScavengeRoot detect_scavenge_root;
+ NOT_PRODUCT(if (TraceScavenge) detect_scavenge_root._print_nm = this);
+ oops_do(&detect_scavenge_root);
+ return detect_scavenge_root.detected_scavenge_root();
+}
+
// Method that knows how to preserve outgoing arguments at call. This method must be
// called with a frame corresponding to a Java invoke
void nmethod::preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) {
@@ -1878,6 +2003,24 @@
// -----------------------------------------------------------------------------
// Verification
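+// Checks that every non-NULL entry visited by oops_do is a valid oop;
+// used by nmethod::verify() below.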
+class VerifyOopsClosure: public OopClosure {
+ nmethod* _nm;
+ bool _ok;
+public:
+ VerifyOopsClosure(nmethod* nm) : _nm(nm), _ok(true) { }
+ bool ok() { return _ok; }
+ virtual void do_oop(oop* p) {
+ if ((*p) == NULL || (*p)->is_oop()) return;
+ if (_ok) {
+ _nm->print_nmethod(true);
+ _ok = false;
+ }
+ tty->print_cr("*** non-oop "PTR_FORMAT" found at "PTR_FORMAT" (offset %d)",
+ (intptr_t)(*p), (intptr_t)p, (int)((intptr_t)p - (intptr_t)_nm));
+ }
+ virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
+};
+
void nmethod::verify() {
// Hmm. OSR methods can be deopted but not marked as zombie or not_entrant
@@ -1911,6 +2054,11 @@
}
}
+ VerifyOopsClosure voc(this);
+ oops_do(&voc);
+ assert(voc.ok(), "embedded oops must be OK");
+ verify_scavenge_root_oops();
+
verify_scopes();
}
@@ -1974,19 +2122,34 @@
// Non-product code
#ifndef PRODUCT
-void nmethod::check_store() {
- // Make sure all oops in the compiled code are tenured
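+// Debug-only closure: prints any scavengable oop found in an nmethod that
+// claims to have none, so verify_scavenge_root_oops() can report the inconsistency.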
+class DebugScavengeRoot: public OopClosure {
+ nmethod* _nm;
+ bool _ok;
+public:
+ DebugScavengeRoot(nmethod* nm) : _nm(nm), _ok(true) { }
+ bool ok() { return _ok; }
+ virtual void do_oop(oop* p) {
+ if ((*p) == NULL || !(*p)->is_scavengable()) return;
+ if (_ok) {
+ _nm->print_nmethod(true);
+ _ok = false;
+ }
+ tty->print_cr("*** non-perm oop "PTR_FORMAT" found at "PTR_FORMAT" (offset %d)",
+ (intptr_t)(*p), (intptr_t)p, (int)((intptr_t)p - (intptr_t)_nm));
+ (*p)->print();
+ }
+ virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
+};
- RelocIterator iter(this);
- while (iter.next()) {
- if (iter.type() == relocInfo::oop_type) {
- oop_Relocation* reloc = iter.oop_reloc();
- oop obj = reloc->oop_value();
- if (obj != NULL && !obj->is_perm()) {
- fatal("must be permanent oop in compiled code");
- }
- }
+void nmethod::verify_scavenge_root_oops() {
+ if (!on_scavenge_root_list()) {
+ // Actually look inside, to verify the claim that it's clean.
+ DebugScavengeRoot debug_scavenge_root(this);
+ oops_do(&debug_scavenge_root);
+ if (!debug_scavenge_root.ok())
+ fatal("found an unadvertised bad non-perm oop in the code cache");
}
+ assert(scavenge_root_not_marked(), "");
}
#endif // PRODUCT
@@ -2019,6 +2182,7 @@
if (is_not_entrant()) tty->print("not_entrant ");
if (is_zombie()) tty->print("zombie ");
if (is_unloaded()) tty->print("unloaded ");
+ if (on_scavenge_root_list()) tty->print("scavenge_root ");
tty->print_cr("}:");
}
if (size () > 0) tty->print_cr(" total in heap [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",