8185141: Generalize scavengeable nmethod root handling
author: eosterlund
date: Fri, 13 Oct 2017 15:08:56 +0200
changeset: 47624:b055cb5170f5
parent: 47623:0a5f1b851890
child: 47625:60dae13727b8
8185141: Generalize scavengeable nmethod root handling
Reviewed-by: tschatzl, pliden, rkennke
src/hotspot/share/c1/c1_Runtime1.cpp
src/hotspot/share/code/codeCache.cpp
src/hotspot/share/code/codeCache.hpp
src/hotspot/share/code/nmethod.cpp
src/hotspot/share/gc/g1/g1CollectedHeap.cpp
src/hotspot/share/gc/g1/g1CollectedHeap.hpp
src/hotspot/share/gc/parallel/parallelScavengeHeap.cpp
src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp
src/hotspot/share/gc/shared/collectedHeap.cpp
src/hotspot/share/gc/shared/collectedHeap.hpp
src/hotspot/share/gc/shared/genCollectedHeap.cpp
src/hotspot/share/gc/shared/genCollectedHeap.hpp
src/hotspot/share/oops/oop.inline.hpp
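
Before the per-file hunks, a condensed view of where the patch lands: the UseG1GC special cases disappear from CodeCache and nmethod, and the scavengeable-root policy moves behind a small set of heap virtuals. The sketch below is assembled from the collectedHeap.hpp hunk further down; it is a summary, not a literal excerpt:

    // Condensed sketch of the resulting interface (names from the patch):
    class CollectedHeap {
    public:
      // An object is scavengable if its location may move during a scavenge
      // (a GC which is not a full GC); now takes an oop rather than const void*.
      virtual bool is_scavengable(oop obj) = 0;

      // Empty defaults: each collector overrides only what it needs.
      virtual void register_nmethod(nmethod* nm) {}
      virtual void unregister_nmethod(nmethod* nm) {}
      virtual void verify_nmethod(nmethod* nm) {}
    };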
--- a/src/hotspot/share/c1/c1_Runtime1.cpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/c1/c1_Runtime1.cpp	Fri Oct 13 15:08:56 2017 +0200
@@ -1221,11 +1221,6 @@
     MutexLockerEx ml_code (CodeCache_lock, Mutex::_no_safepoint_check_flag);
     nmethod* nm = CodeCache::find_nmethod(caller_frame.pc());
     guarantee(nm != NULL, "only nmethods can contain non-perm oops");
-    if (!nm->on_scavenge_root_list() &&
-        ((mirror.not_null() && mirror()->is_scavengable()) ||
-         (appendix.not_null() && appendix->is_scavengable()))) {
-      CodeCache::add_scavenge_root_nmethod(nm);
-    }
 
     // Since we've patched some oops in the nmethod,
     // (re)register it with the heap.
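
The deleted block above made the C1 patching stub maintain the scavenge-root list by hand. After the patch, the register_nmethod() call that follows the kept comment covers the same case through the heap interface; for the generational collectors the flow is, schematically:

    // Call flow after the patch (sketch, not code from the changeset):
    //   Universe::heap()->register_nmethod(nm)
    //     -> GenCollectedHeap/ParallelScavengeHeap::register_nmethod(nm)
    //       -> CodeCache::register_scavenge_root_nmethod(nm)
    //         -> adds nm to the list iff it is not already on it and
    //            nm->detect_scavenge_root_oops() finds a scavengeable oop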
--- a/src/hotspot/share/code/codeCache.cpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/code/codeCache.cpp	Fri Oct 13 15:08:56 2017 +0200
@@ -683,22 +683,19 @@
       if (cb->is_alive()) {
         f->do_code_blob(cb);
 #ifdef ASSERT
-        if (cb->is_nmethod())
-        ((nmethod*)cb)->verify_scavenge_root_oops();
+        if (cb->is_nmethod()) {
+          Universe::heap()->verify_nmethod((nmethod*)cb);
+        }
 #endif //ASSERT
       }
     }
   }
 }
 
-// Walk the list of methods which might contain non-perm oops.
+// Walk the list of methods which might contain oops to the java heap.
 void CodeCache::scavenge_root_nmethods_do(CodeBlobToOopClosure* f) {
   assert_locked_or_safepoint(CodeCache_lock);
 
-  if (UseG1GC) {
-    return;
-  }
-
   const bool fix_relocations = f->fix_relocations();
   debug_only(mark_scavenge_root_nmethods());
 
@@ -735,13 +732,20 @@
   debug_only(verify_perm_nmethods(NULL));
 }
 
+void CodeCache::register_scavenge_root_nmethod(nmethod* nm) {
+  assert_locked_or_safepoint(CodeCache_lock);
+  if (!nm->on_scavenge_root_list() && nm->detect_scavenge_root_oops()) {
+    add_scavenge_root_nmethod(nm);
+  }
+}
+
+void CodeCache::verify_scavenge_root_nmethod(nmethod* nm) {
+  nm->verify_scavenge_root_oops();
+}
+
 void CodeCache::add_scavenge_root_nmethod(nmethod* nm) {
   assert_locked_or_safepoint(CodeCache_lock);
 
-  if (UseG1GC) {
-    return;
-  }
-
   nm->set_on_scavenge_root_list();
   nm->set_scavenge_root_link(_scavenge_root_nmethods);
   set_scavenge_root_nmethods(nm);
@@ -754,8 +758,6 @@
   assert((prev == NULL && scavenge_root_nmethods() == nm) ||
          (prev != NULL && prev->scavenge_root_link() == nm), "precondition");
 
-  assert(!UseG1GC, "G1 does not use the scavenge_root_nmethods list");
-
   print_trace("unlink_scavenge_root", nm);
   if (prev == NULL) {
     set_scavenge_root_nmethods(nm->scavenge_root_link());
@@ -769,10 +771,6 @@
 void CodeCache::drop_scavenge_root_nmethod(nmethod* nm) {
   assert_locked_or_safepoint(CodeCache_lock);
 
-  if (UseG1GC) {
-    return;
-  }
-
   print_trace("drop_scavenge_root", nm);
   nmethod* prev = NULL;
   for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) {
@@ -788,10 +786,6 @@
 void CodeCache::prune_scavenge_root_nmethods() {
   assert_locked_or_safepoint(CodeCache_lock);
 
-  if (UseG1GC) {
-    return;
-  }
-
   debug_only(mark_scavenge_root_nmethods());
 
   nmethod* last = NULL;
@@ -820,10 +814,6 @@
 
 #ifndef PRODUCT
 void CodeCache::asserted_non_scavengable_nmethods_do(CodeBlobClosure* f) {
-  if (UseG1GC) {
-    return;
-  }
-
   // While we are here, verify the integrity of the list.
   mark_scavenge_root_nmethods();
   for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) {
@@ -833,7 +823,7 @@
   verify_perm_nmethods(f);
 }
 
-// Temporarily mark nmethods that are claimed to be on the non-perm list.
+// Temporarily mark nmethods that are claimed to be on the scavenge list.
 void CodeCache::mark_scavenge_root_nmethods() {
   NMethodIterator iter;
   while(iter.next_alive()) {
@@ -854,7 +844,7 @@
     assert(nm->scavenge_root_not_marked(), "must be already processed");
     if (nm->on_scavenge_root_list())
       call_f = false;  // don't show this one to the client
-    nm->verify_scavenge_root_oops();
+    Universe::heap()->verify_nmethod(nm);
     if (call_f)  f_or_null->do_code_blob(nm);
   }
 }
@@ -1640,4 +1630,3 @@
             blob_count(), nmethod_count(), adapter_count(),
             unallocated_capacity());
 }
-
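
With the UseG1GC early-outs gone, CodeCache is collector-agnostic: only heaps that delegate their register_nmethod() here ever populate the list, so G1 simply never reaches it. The new register_scavenge_root_nmethod() wrapper ties the list to the new is_scavengable(oop) query through the existing nm->detect_scavenge_root_oops() walk; schematically:

    // Schematic, not a literal excerpt:
    // CodeCache::register_scavenge_root_nmethod(nm)
    //   -> nm->detect_scavenge_root_oops()
    //        // walks nm's embedded oops; returns true as soon as some oop p
    //        // satisfies Universe::heap()->is_scavengable(p)
    //   -> add_scavenge_root_nmethod(nm) only when such an oop was found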
--- a/src/hotspot/share/code/codeCache.hpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/code/codeCache.hpp	Fri Oct 13 15:08:56 2017 +0200
@@ -181,6 +181,10 @@
   static void scavenge_root_nmethods_do(CodeBlobToOopClosure* f);
 
   static nmethod* scavenge_root_nmethods()            { return _scavenge_root_nmethods; }
+  // register_scavenge_root_nmethod() conditionally adds the nmethod to the list
+  // if it is not already on the list and has a scavengeable root
+  static void register_scavenge_root_nmethod(nmethod* nm);
+  static void verify_scavenge_root_nmethod(nmethod* nm);
   static void add_scavenge_root_nmethod(nmethod* nm);
   static void drop_scavenge_root_nmethod(nmethod* nm);
 
--- a/src/hotspot/share/code/nmethod.cpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/code/nmethod.cpp	Fri Oct 13 15:08:56 2017 +0200
@@ -411,11 +411,8 @@
   _oops_do_mark_link       = NULL;
   _jmethod_id              = NULL;
   _osr_link                = NULL;
-  if (UseG1GC) {
-    _unloading_next        = NULL;
-  } else {
-    _scavenge_root_link    = NULL;
-  }
+  _unloading_next          = NULL;
+  _scavenge_root_link      = NULL;
   _scavenge_root_state     = 0;
 #if INCLUDE_RTM_OPT
   _rtm_state               = NoRTM;
@@ -599,12 +596,9 @@
     code_buffer->copy_code_and_locs_to(this);
     code_buffer->copy_values_to(this);
     if (ScavengeRootsInCode) {
-      if (detect_scavenge_root_oops()) {
-        CodeCache::add_scavenge_root_nmethod(this);
-      }
       Universe::heap()->register_nmethod(this);
     }
-    debug_only(verify_scavenge_root_oops());
+    debug_only(Universe::heap()->verify_nmethod(this));
     CodeCache::commit(this);
   }
 
@@ -754,12 +748,9 @@
     debug_info->copy_to(this);
     dependencies->copy_to(this);
     if (ScavengeRootsInCode) {
-      if (detect_scavenge_root_oops()) {
-        CodeCache::add_scavenge_root_nmethod(this);
-      }
       Universe::heap()->register_nmethod(this);
     }
-    debug_only(verify_scavenge_root_oops());
+    debug_only(Universe::heap()->verify_nmethod(this));
 
     CodeCache::commit(this);
 
@@ -2137,7 +2128,7 @@
   VerifyOopsClosure voc(this);
   oops_do(&voc);
   assert(voc.ok(), "embedded oops must be OK");
-  verify_scavenge_root_oops();
+  Universe::heap()->verify_nmethod(this);
 
   verify_scopes();
 }
@@ -2230,10 +2221,6 @@
 };
 
 void nmethod::verify_scavenge_root_oops() {
-  if (UseG1GC) {
-    return;
-  }
-
   if (!on_scavenge_root_list()) {
     // Actually look inside, to verify the claim that it's clean.
     DebugScavengeRoot debug_scavenge_root(this);
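
nmethod now initializes both _unloading_next and _scavenge_root_link unconditionally and routes all verification through the heap, so the debug checks become no-ops exactly where the list is unused. The dispatch, as wired by the hunks in this changeset:

    // Universe::heap()->verify_nmethod(nm)   (debug builds; sketch)
    //   CollectedHeap          -> {}  (empty default; G1 inherits it)
    //   GenCollectedHeap       -> CodeCache::verify_scavenge_root_nmethod(nm)
    //   ParallelScavengeHeap   -> CodeCache::verify_scavenge_root_nmethod(nm)
    //                               -> nm->verify_scavenge_root_oops()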
--- a/src/hotspot/share/gc/g1/g1CollectedHeap.cpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/gc/g1/g1CollectedHeap.cpp	Fri Oct 13 15:08:56 2017 +0200
@@ -141,13 +141,6 @@
   reset_from_card_cache(start_idx, num_regions);
 }
 
-// Returns true if the reference points to an object that
-// can move in an incremental collection.
-bool G1CollectedHeap::is_scavengable(const void* p) {
-  HeapRegion* hr = heap_region_containing(p);
-  return !hr->is_pinned();
-}
-
 // Private methods.
 
 HeapRegion*
@@ -5323,17 +5316,20 @@
   void do_oop(narrowOop* p) { do_oop_work(p); }
 };
 
+// Returns true if the reference points to an object that
+// can move in an incremental collection.
+bool G1CollectedHeap::is_scavengable(oop obj) {
+  HeapRegion* hr = heap_region_containing(obj);
+  return !hr->is_pinned();
+}
+
 void G1CollectedHeap::register_nmethod(nmethod* nm) {
-  CollectedHeap::register_nmethod(nm);
-
   guarantee(nm != NULL, "sanity");
   RegisterNMethodOopClosure reg_cl(this, nm);
   nm->oops_do(&reg_cl);
 }
 
 void G1CollectedHeap::unregister_nmethod(nmethod* nm) {
-  CollectedHeap::unregister_nmethod(nm);
-
   guarantee(nm != NULL, "sanity");
   UnregisterNMethodOopClosure reg_cl(this, nm);
   nm->oops_do(&reg_cl, true);
--- a/src/hotspot/share/gc/g1/g1CollectedHeap.hpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/gc/g1/g1CollectedHeap.hpp	Fri Oct 13 15:08:56 2017 +0200
@@ -1282,8 +1282,6 @@
 
   inline bool is_in_young(const oop obj);
 
-  virtual bool is_scavengable(const void* addr);
-
   // We don't need barriers for initializing stores to objects
   // in the young gen: for the SATB pre-barrier, there is no
   // pre-value that needs to be remembered; for the remembered-set
@@ -1395,6 +1393,9 @@
 
   // Optimized nmethod scanning support routines
 
+  // Is an oop scavengeable
+  virtual bool is_scavengable(oop obj);
+
   // Register the given nmethod with the G1 heap.
   virtual void register_nmethod(nmethod* nm);
 
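
G1 drops its CollectedHeap super calls because the base methods are gone (register/unregister are now empty defaults), and it does not override verify_nmethod(), so the shared scavenge-root list stays untouched. A collector in the same mold would plug in like this; the class below is hypothetical, for illustration only:

    // Hypothetical sketch of a collector that, like G1, tracks code roots
    // itself instead of using the shared CodeCache list:
    class RegionTrackingHeap : public CollectedHeap {
      virtual void register_nmethod(nmethod* nm) {
        // walk nm's embedded oops and record nm as a code root for each
        // containing region (closure-based, as in the hunk above)
      }
      virtual void unregister_nmethod(nmethod* nm) {
        // symmetric removal from the per-region code-root sets
      }
      // verify_nmethod() keeps the empty default: there is no scavenge-root
      // list to cross-check, because it is never used here.
    };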
--- a/src/hotspot/share/gc/parallel/parallelScavengeHeap.cpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/gc/parallel/parallelScavengeHeap.cpp	Fri Oct 13 15:08:56 2017 +0200
@@ -23,6 +23,7 @@
  */
 
 #include "precompiled.hpp"
+#include "code/codeCache.hpp"
 #include "gc/parallel/adjoiningGenerations.hpp"
 #include "gc/parallel/adjoiningVirtualSpaces.hpp"
 #include "gc/parallel/cardTableExtension.hpp"
@@ -169,10 +170,6 @@
   return young_gen()->is_in_reserved(p) || old_gen()->is_in_reserved(p);
 }
 
-bool ParallelScavengeHeap::is_scavengable(const void* addr) {
-  return is_in_young((oop)addr);
-}
-
 // There are two levels of allocation policy here.
 //
 // When an allocation request fails, the requesting thread must invoke a VM
@@ -665,3 +662,15 @@
   }
 }
 #endif
+
+bool ParallelScavengeHeap::is_scavengable(oop obj) {
+  return is_in_young(obj);
+}
+
+void ParallelScavengeHeap::register_nmethod(nmethod* nm) {
+  CodeCache::register_scavenge_root_nmethod(nm);
+}
+
+void ParallelScavengeHeap::verify_nmethod(nmethod* nm) {
+  CodeCache::verify_scavenge_root_nmethod(nm);
+}
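
ParallelScavengeHeap (and GenCollectedHeap below, identically) forwards both hooks to the shared list, so the young-generation root scan can keep walking it unconditionally. A hedged usage sketch; the closure wiring mirrors the f->fix_relocations() use in codeCache.cpp, and the call site shown is illustrative rather than quoted from the scavenger:

    // Illustrative root-scan call site (OopClosure subtype left abstract):
    void scan_scavengable_code_roots(OopClosure* scavenge_cl) {
      CodeBlobToOopClosure code_roots(scavenge_cl, /* fix_relocations */ true);
      CodeCache::scavenge_root_nmethods_do(&code_roots);
    }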
--- a/src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp	Fri Oct 13 15:08:56 2017 +0200
@@ -134,7 +134,9 @@
   // can be moved in a partial collection.  For currently implemented
   // generational collectors that means during a collection of
   // the young gen.
-  virtual bool is_scavengable(const void* addr);
+  virtual bool is_scavengable(oop obj);
+  virtual void register_nmethod(nmethod* nm);
+  virtual void verify_nmethod(nmethod* nmethod);
 
   size_t max_capacity() const;
 
--- a/src/hotspot/share/gc/shared/collectedHeap.cpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/gc/shared/collectedHeap.cpp	Fri Oct 13 15:08:56 2017 +0200
@@ -135,14 +135,6 @@
   _barrier_set->print_on(st);
 }
 
-void CollectedHeap::register_nmethod(nmethod* nm) {
-  assert_locked_or_safepoint(CodeCache_lock);
-}
-
-void CollectedHeap::unregister_nmethod(nmethod* nm) {
-  assert_locked_or_safepoint(CodeCache_lock);
-}
-
 void CollectedHeap::trace_heap(GCWhen::Type when, const GCTracer* gc_tracer) {
   const GCHeapSummary& heap_summary = create_heap_summary();
   gc_tracer->report_gc_heap_summary(when, heap_summary);
--- a/src/hotspot/share/gc/shared/collectedHeap.hpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/gc/shared/collectedHeap.hpp	Fri Oct 13 15:08:56 2017 +0200
@@ -289,10 +289,6 @@
     return p == NULL || is_in_closed_subset(p);
   }
 
-  // An object is scavengable if its location may move during a scavenge.
-  // (A scavenge is a GC which is not a full GC.)
-  virtual bool is_scavengable(const void *p) = 0;
-
   void set_gc_cause(GCCause::Cause v) {
      if (UsePerfData) {
        _gc_lastcause = _gc_cause;
@@ -570,10 +566,14 @@
   void print_heap_before_gc();
   void print_heap_after_gc();
 
+  // An object is scavengable if its location may move during a scavenge.
+  // (A scavenge is a GC which is not a full GC.)
+  virtual bool is_scavengable(oop obj) = 0;
   // Registering and unregistering an nmethod (compiled code) with the heap.
   // Override with specific mechanism for each specialized heap type.
-  virtual void register_nmethod(nmethod* nm);
-  virtual void unregister_nmethod(nmethod* nm);
+  virtual void register_nmethod(nmethod* nm) {}
+  virtual void unregister_nmethod(nmethod* nm) {}
+  virtual void verify_nmethod(nmethod* nmethod) {}
 
   void trace_heap_before_gc(const GCTracer* gc_tracer);
   void trace_heap_after_gc(const GCTracer* gc_tracer);
--- a/src/hotspot/share/gc/shared/genCollectedHeap.cpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/gc/shared/genCollectedHeap.cpp	Fri Oct 13 15:08:56 2017 +0200
@@ -501,6 +501,14 @@
 #endif
 }
 
+void GenCollectedHeap::register_nmethod(nmethod* nm) {
+  CodeCache::register_scavenge_root_nmethod(nm);
+}
+
+void GenCollectedHeap::verify_nmethod(nmethod* nm) {
+  CodeCache::verify_scavenge_root_nmethod(nm);
+}
+
 HeapWord* GenCollectedHeap::satisfy_failed_allocation(size_t size, bool is_tlab) {
   return gen_policy()->satisfy_failed_allocation(size, is_tlab);
 }
--- a/src/hotspot/share/gc/shared/genCollectedHeap.hpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/gc/shared/genCollectedHeap.hpp	Fri Oct 13 15:08:56 2017 +0200
@@ -229,10 +229,14 @@
   bool is_in_partial_collection(const void* p);
 #endif
 
-  virtual bool is_scavengable(const void* addr) {
-    return is_in_young((oop)addr);
+  virtual bool is_scavengable(oop obj) {
+    return is_in_young(obj);
   }
 
+  // Optimized nmethod scanning support routines
+  virtual void register_nmethod(nmethod* nm);
+  virtual void verify_nmethod(nmethod* nmethod);
+
   // Iteration functions.
   void oop_iterate_no_header(OopClosure* cl);
   void oop_iterate(ExtendedOopClosure* cl);
--- a/src/hotspot/share/oops/oop.inline.hpp	Fri Oct 13 14:02:01 2017 +0200
+++ b/src/hotspot/share/oops/oop.inline.hpp	Fri Oct 13 15:08:56 2017 +0200
@@ -539,7 +539,7 @@
 }
 
 bool oopDesc::is_scavengable() const {
-  return Universe::heap()->is_scavengable(this);
+  return Universe::heap()->is_scavengable(oop(const_cast<oopDesc*>(this)));
 }
 
 // Used by scavengers
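
The only caller-visible wrinkle of switching is_scavengable() to take an oop: oop wraps a non-const oopDesc*, so the const this pointer has to be cast away first. Spelled out, equivalent to the one-liner above:

    // Equivalent expansion of the changed line, for illustration:
    bool oopDesc::is_scavengable() const {
      oopDesc* self = const_cast<oopDesc*>(this);  // oop wraps a mutable oopDesc*
      return Universe::heap()->is_scavengable(oop(self));
    }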