8202164: Remove some unneeded BoolObjectClosure* is_alive parameters
author coleenp
Wed, 25 Apr 2018 17:50:32 -0400
changeset 49890 29b94ed63a09
parent 49889 4de5ca97feee
child 49891 61b0342b5711
8202164: Remove some unneeded BoolObjectClosure* is_alive parameters
Reviewed-by: kbarrett, stefank
src/hotspot/share/aot/aotCompiledMethod.hpp
src/hotspot/share/code/compiledMethod.cpp
src/hotspot/share/code/compiledMethod.hpp
src/hotspot/share/code/nmethod.cpp
src/hotspot/share/code/nmethod.hpp
src/hotspot/share/gc/g1/g1CollectedHeap.cpp
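
The changeset drops BoolObjectClosure* is_alive arguments from callees that never consult the closure and, where the NULL/non-NULL check did carry meaning (nmethod::flush_dependencies), replaces it with an explicit bool delete_immediately. Below is a minimal standalone sketch of the unused-parameter removal, not the HotSpot sources; LivenessCheck, Method, and unload_if_dead are illustrative stand-ins.

// Minimal standalone sketch of the refactoring pattern (not HotSpot code).
#include <cassert>
#include <cstdio>

struct LivenessCheck {                 // stands in for BoolObjectClosure
  virtual bool do_object_b(void* obj) = 0;
  virtual ~LivenessCheck() = default;
};

struct Method {
  bool dead = false;

  // Before: the closure was threaded through even though this routine
  // never called do_object_b(); it only forwarded the pointer.
  bool unload_if_dead_before(LivenessCheck* is_alive, bool unloading_occurred) {
    (void)is_alive;                    // never used -> candidate for removal
    return unloading_occurred && dead;
  }

  // After: the unused parameter is gone, so every caller loses one argument
  // and the signature documents what the routine actually depends on.
  bool unload_if_dead(bool unloading_occurred) {
    return unloading_occurred && dead;
  }
};

int main() {
  Method m;
  m.dead = true;
  assert(m.unload_if_dead(/*unloading_occurred*/ true));
  std::printf("unused-parameter removal sketch ok\n");
}

The diff below applies the same idea to do_unloading_jvmci, do_unloading_parallel_postponed, make_unloaded, and the clean_if_nmethod_is_unloaded helpers.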
--- a/src/hotspot/share/aot/aotCompiledMethod.hpp	Wed Apr 25 11:24:33 2018 -0700
+++ b/src/hotspot/share/aot/aotCompiledMethod.hpp	Wed Apr 25 17:50:32 2018 -0400
@@ -285,7 +285,7 @@
 
 protected:
   virtual bool do_unloading_oops(address low_boundary, BoolObjectClosure* is_alive, bool unloading_occurred);
-  virtual bool do_unloading_jvmci(BoolObjectClosure* is_alive, bool unloading_occurred) { return false; }
+  virtual bool do_unloading_jvmci(bool unloading_occurred) { return false; }
 
 };
 
--- a/src/hotspot/share/code/compiledMethod.cpp	Wed Apr 25 11:24:33 2018 -0700
+++ b/src/hotspot/share/code/compiledMethod.cpp	Wed Apr 25 17:50:32 2018 -0400
@@ -525,7 +525,7 @@
   }
 
 #if INCLUDE_JVMCI
-  if (do_unloading_jvmci(is_alive, unloading_occurred)) {
+  if (do_unloading_jvmci(unloading_occurred)) {
     return;
   }
 #endif
@@ -535,7 +535,7 @@
 }
 
 template <class CompiledICorStaticCall>
-static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, BoolObjectClosure *is_alive, CompiledMethod* from) {
+static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, CompiledMethod* from) {
   // Ok, to lookup references to zombies here
   CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
   CompiledMethod* nm = (cb != NULL) ? cb->as_compiled_method_or_null() : NULL;
@@ -555,12 +555,12 @@
   return false;
 }
 
-static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, BoolObjectClosure *is_alive, CompiledMethod* from) {
-  return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), is_alive, from);
+static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, CompiledMethod* from) {
+  return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), from);
 }
 
-static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, BoolObjectClosure *is_alive, CompiledMethod* from) {
-  return clean_if_nmethod_is_unloaded(csc, csc->destination(), is_alive, from);
+static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, CompiledMethod* from) {
+  return clean_if_nmethod_is_unloaded(csc, csc->destination(), from);
 }
 
 bool CompiledMethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
@@ -608,15 +608,15 @@
         clean_ic_if_metadata_is_dead(CompiledIC_at(&iter));
       }
 
-      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
+      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
       break;
 
     case relocInfo::opt_virtual_call_type:
-      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
+      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
       break;
 
     case relocInfo::static_call_type:
-      postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
+      postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this);
       break;
 
     case relocInfo::oop_type:
@@ -636,7 +636,7 @@
   }
 
 #if INCLUDE_JVMCI
-  if (do_unloading_jvmci(is_alive, unloading_occurred)) {
+  if (do_unloading_jvmci(unloading_occurred)) {
     return postponed;
   }
 #endif
@@ -647,7 +647,7 @@
   return postponed;
 }
 
-void CompiledMethod::do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred) {
+void CompiledMethod::do_unloading_parallel_postponed() {
   ResourceMark rm;
 
   // Make sure the oop's ready to receive visitors
@@ -671,15 +671,15 @@
     switch (iter.type()) {
 
     case relocInfo::virtual_call_type:
-      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
+      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
       break;
 
     case relocInfo::opt_virtual_call_type:
-      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
+      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), this);
       break;
 
     case relocInfo::static_call_type:
-      clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
+      clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), this);
       break;
 
     default:
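
In compiledMethod.cpp the template helper and its two thin overloads lose the is_alive argument, which propagates the simplification to every call in do_unloading_parallel and do_unloading_parallel_postponed. A compilable sketch of that wrapper pattern follows, using stand-in types rather than the real CompiledIC/CompiledStaticCall classes.

// Standalone sketch of the thin-wrapper pattern after the parameter drop.
#include <cstdio>

struct CompiledMethodStub { const char* name; };
struct InlineCacheStub  { void* destination() { return nullptr; } };
struct StaticCallStub   { void* destination() { return nullptr; } };

// The shared template does the real work; it never needed a liveness closure.
template <class CallSite>
static bool clean_if_unloaded(CallSite* site, void* addr, CompiledMethodStub* from) {
  (void)site; (void)addr;
  std::printf("cleaning call site in %s\n", from->name);
  return false;   // "nothing postponed" in this sketch
}

// Each overload just resolves the destination and forwards, so removing the
// unused parameter here removes it from every call site in one step.
static bool clean_if_unloaded(InlineCacheStub* ic, CompiledMethodStub* from) {
  return clean_if_unloaded(ic, ic->destination(), from);
}
static bool clean_if_unloaded(StaticCallStub* csc, CompiledMethodStub* from) {
  return clean_if_unloaded(csc, csc->destination(), from);
}

int main() {
  CompiledMethodStub cm{"stub_method"};
  InlineCacheStub ic;
  StaticCallStub  sc;
  clean_if_unloaded(&ic, &cm);
  clean_if_unloaded(&sc, &cm);
}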
--- a/src/hotspot/share/code/compiledMethod.hpp	Wed Apr 25 11:24:33 2018 -0700
+++ b/src/hotspot/share/code/compiledMethod.hpp	Wed Apr 25 17:50:32 2018 -0400
@@ -372,7 +372,7 @@
   virtual void do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred);
   //  The parallel versions are used by G1.
   virtual bool do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred);
-  virtual void do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred);
+  virtual void do_unloading_parallel_postponed();
 
   static unsigned char global_unloading_clock()   { return _global_unloading_clock; }
   static void increase_unloading_clock();
@@ -383,7 +383,7 @@
 protected:
   virtual bool do_unloading_oops(address low_boundary, BoolObjectClosure* is_alive, bool unloading_occurred) = 0;
 #if INCLUDE_JVMCI
-  virtual bool do_unloading_jvmci(BoolObjectClosure* is_alive, bool unloading_occurred) = 0;
+  virtual bool do_unloading_jvmci(bool unloading_occurred) = 0;
 #endif
 
 private:
--- a/src/hotspot/share/code/nmethod.cpp	Wed Apr 25 11:24:33 2018 -0700
+++ b/src/hotspot/share/code/nmethod.cpp	Wed Apr 25 17:50:32 2018 -0400
@@ -1028,17 +1028,16 @@
   mdo->inc_decompile_count();
 }
 
-void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) {
+void nmethod::make_unloaded(oop cause) {
 
   post_compiled_method_unload();
 
-  // Since this nmethod is being unloaded, make sure that dependencies
-  // recorded in instanceKlasses get flushed and pass non-NULL closure to
-  // indicate that this work is being done during a GC.
+  // This nmethod is being unloaded, make sure that dependencies
+  // recorded in instanceKlasses get flushed.
+  // Since this work is being done during a GC, defer deleting dependencies from the
+  // InstanceKlass.
   assert(Universe::heap()->is_gc_active(), "should only be called during gc");
-  assert(is_alive != NULL, "Should be non-NULL");
-  // A non-NULL is_alive closure indicates that this is being called during GC.
-  flush_dependencies(is_alive);
+  flush_dependencies(/*delete_immediately*/false);
 
   // Break cycle between nmethod & method
   LogTarget(Trace, class, unload) lt;
@@ -1261,7 +1260,7 @@
       if (nmethod_needs_unregister) {
         Universe::heap()->unregister_nmethod(this);
       }
-      flush_dependencies(NULL);
+      flush_dependencies(/*delete_immediately*/true);
     }
 
     // zombie only - if a JVMTI agent has enabled the CompiledMethodUnload
@@ -1344,13 +1343,13 @@
 // of dependencies must happen during phase 1 since after GC any
 // dependencies in the unloaded nmethod won't be updated, so
 // traversing the dependency information in unsafe.  In that case this
-// function is called with a non-NULL argument and this function only
+// function is called with a boolean argument and this function only
 // notifies instanceKlasses that are reachable
 
-void nmethod::flush_dependencies(BoolObjectClosure* is_alive) {
+void nmethod::flush_dependencies(bool delete_immediately) {
   assert_locked_or_safepoint(CodeCache_lock);
-  assert(Universe::heap()->is_gc_active() == (is_alive != NULL),
-  "is_alive is non-NULL if and only if we are called during GC");
+  assert(Universe::heap()->is_gc_active() != delete_immediately,
+  "delete_immediately is false if and only if we are called during GC");
   if (!has_flushed_dependencies()) {
     set_has_flushed_dependencies();
     for (Dependencies::DepStream deps(this); deps.next(); ) {
@@ -1363,13 +1362,12 @@
         if (klass == NULL) {
           continue;  // ignore things like evol_method
         }
-        // During GC the is_alive closure is non-NULL, and is used to
-        // determine liveness of dependees that need to be updated.
-        if (is_alive == NULL || klass->is_loader_alive()) {
+        // During GC delete_immediately is false, and liveness
+        // of dependee determines class that needs to be updated.
+        if (delete_immediately || klass->is_loader_alive()) {
           // The GC defers deletion of this entry, since there might be multiple threads
           // iterating over the _dependencies graph. Other call paths are single-threaded
           // and may delete it immediately.
-          bool delete_immediately = is_alive == NULL;
           InstanceKlass::cast(klass)->remove_dependent_nmethod(this, delete_immediately);
         }
       }
@@ -1390,7 +1388,7 @@
   // simply because one of its constant oops has gone dead.
   // No actual classes need to be unloaded in order for this to occur.
   assert(unloading_occurred || ScavengeRootsInCode, "Inconsistency in unloading");
-  make_unloaded(is_alive, obj);
+  make_unloaded(obj);
   return true;
 }
 
@@ -1516,12 +1514,12 @@
 }
 
 #if INCLUDE_JVMCI
-bool nmethod::do_unloading_jvmci(BoolObjectClosure* is_alive, bool unloading_occurred) {
+bool nmethod::do_unloading_jvmci(bool unloading_occurred) {
   if (_jvmci_installed_code != NULL) {
     if (JNIHandles::is_global_weak_cleared(_jvmci_installed_code)) {
       if (_jvmci_installed_code_triggers_unloading) {
         // jweak reference processing has already cleared the referent
-        make_unloaded(is_alive, NULL);
+        make_unloaded(NULL);
         return true;
       } else {
         clear_jvmci_installed_code();
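
In nmethod.cpp the NULL-pointer sentinel (is_alive == NULL meant "not called during GC, delete dependencies now") becomes an explicit delete_immediately flag, guarded by the assert that "GC is active" and "delete immediately" are mutually exclusive. A standalone sketch of that flag-instead-of-sentinel pattern, with simplified stand-in types:

// Standalone sketch; gc_active and DependencyTable stand in for
// Universe::heap()->is_gc_active() and the InstanceKlass dependency list.
#include <algorithm>
#include <cassert>
#include <cstdio>
#include <vector>

static bool gc_active = false;

struct DependencyTable {
  std::vector<int> entries{1, 2, 3};

  void remove_dependent(int id, bool delete_immediately) {
    if (delete_immediately) {
      // Non-GC callers are single threaded here and may erase right away.
      entries.erase(std::remove(entries.begin(), entries.end(), id), entries.end());
    } else {
      // GC callers defer: other threads may still be iterating the table.
      std::printf("deferring removal of dependency %d\n", id);
    }
  }
};

static void flush_dependencies(DependencyTable& deps, bool delete_immediately) {
  // Mirrors the new assert: exactly one of "during GC" / "delete now" holds.
  assert(gc_active != delete_immediately);
  const int ids[] = {1, 2, 3};          // pretend these are the recorded dependees
  for (int id : ids) {
    deps.remove_dependent(id, delete_immediately);
  }
}

int main() {
  DependencyTable deps;

  gc_active = true;                     // unloading path: defer deletion
  flush_dependencies(deps, /*delete_immediately*/ false);

  gc_active = false;                    // flush/zombie path: delete right away
  flush_dependencies(deps, /*delete_immediately*/ true);
}

Passing the flag explicitly lets the caller's intent show up at the call site (flush_dependencies(/*delete_immediately*/true)) instead of being encoded in whether a closure pointer happens to be NULL.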
--- a/src/hotspot/share/code/nmethod.hpp	Wed Apr 25 11:24:33 2018 -0700
+++ b/src/hotspot/share/code/nmethod.hpp	Wed Apr 25 17:50:32 2018 -0400
@@ -349,10 +349,10 @@
     return _state;
   }
 
-  void  make_unloaded(BoolObjectClosure* is_alive, oop cause);
+  void  make_unloaded(oop cause);
 
   bool has_dependencies()                         { return dependencies_size() != 0; }
-  void flush_dependencies(BoolObjectClosure* is_alive);
+  void flush_dependencies(bool delete_immediately);
   bool has_flushed_dependencies()                 { return _has_flushed_dependencies; }
   void set_has_flushed_dependencies()             {
     assert(!has_flushed_dependencies(), "should only happen once");
@@ -488,7 +488,7 @@
 #if INCLUDE_JVMCI
   // See comment for _jvmci_installed_code_triggers_unloading field.
   // Returns whether this nmethod was unloaded.
-  virtual bool do_unloading_jvmci(BoolObjectClosure* is_alive, bool unloading_occurred);
+  virtual bool do_unloading_jvmci(bool unloading_occurred);
 #endif
 
  private:
--- a/src/hotspot/share/gc/g1/g1CollectedHeap.cpp	Wed Apr 25 11:24:33 2018 -0700
+++ b/src/hotspot/share/gc/g1/g1CollectedHeap.cpp	Wed Apr 25 17:50:32 2018 -0400
@@ -3353,7 +3353,7 @@
   }
 
   void clean_nmethod_postponed(CompiledMethod* nm) {
-    nm->do_unloading_parallel_postponed(_is_alive, _unloading_occurred);
+    nm->do_unloading_parallel_postponed();
   }
 
   static const int MaxClaimNmethods = 16;