8066827: Remove ReferenceProcessor::clean_up_discovered_references()
author:    kbarrett
date:      Wed, 17 Dec 2014 22:32:44 -0500
changeset: 28212:647b7d0efb88
parent:    28211:f3a2bc065255
child:     28213:b0bf57cd1e9d
8066827: Remove ReferenceProcessor::clean_up_discovered_references()
Summary: Abandon rather than clean up discovered references.
Reviewed-by: jmasa, ysr
hotspot/src/share/vm/gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.cpp
hotspot/src/share/vm/gc_implementation/g1/concurrentMark.cpp
hotspot/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp
hotspot/src/share/vm/gc_implementation/parallelScavenge/psMarkSweep.cpp
hotspot/src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.cpp
hotspot/src/share/vm/gc_implementation/parallelScavenge/psScavenge.cpp
hotspot/src/share/vm/memory/genCollectedHeap.cpp
hotspot/src/share/vm/memory/referenceProcessor.cpp
hotspot/src/share/vm/memory/referenceProcessor.hpp
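
Most of the change is mechanical: enable_discovery() drops its verify_disabled flag (the not-currently-discovering assert becomes unconditional) and check_no_refs now defaults to true, so the stop-the-world collectors can call it with no arguments. A minimal sketch of the resulting collector prologue, reusing the names from the call sites patched below (illustrative fragment only, not a complete excerpt):

  // Typical STW collection prologue after this change:
  ref_processor()->enable_discovery();                // was enable_discovery(true, true)
  ref_processor()->setup_policy(clear_all_softrefs);  // clear_all_softrefs supplied by the caller
  // ... marking / collection work that performs reference discovery ...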
--- a/hotspot/src/share/vm/gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.cpp	Thu Dec 18 13:30:00 2014 +0000
+++ b/hotspot/src/share/vm/gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.cpp	Wed Dec 17 22:32:44 2014 -0500
@@ -1608,14 +1608,15 @@
 
   // If the collection is being acquired from the background
   // collector, there may be references on the discovered
-  // references lists that have NULL referents (being those
-  // that were concurrently cleared by a mutator) or
-  // that are no longer active (having been enqueued concurrently
-  // by the mutator).
-  // Scrub the list of those references because Mark-Sweep-Compact
-  // code assumes referents are not NULL and that all discovered
-  // Reference objects are active.
-  ref_processor()->clean_up_discovered_references();
+  // references lists.  Abandon those references, since some
+  // of them may have become unreachable after concurrent
+  // discovery; the STW compacting collector will redo discovery
+  // more precisely, without being subject to floating garbage.
+  // Leaving otherwise unreachable references in the discovered
+  // lists would require special handling.
+  ref_processor()->disable_discovery();
+  ref_processor()->abandon_partial_discovery();
+  ref_processor()->verify_no_references_recorded();
 
   if (first_state > Idling) {
     save_heap_summary();
@@ -1681,7 +1682,7 @@
   ReferenceProcessorMTDiscoveryMutator rp_mut_discovery(ref_processor(), false);
 
   ref_processor()->set_enqueuing_is_done(false);
-  ref_processor()->enable_discovery(false /*verify_disabled*/, false /*check_no_refs*/);
+  ref_processor()->enable_discovery();
   ref_processor()->setup_policy(clear_all_soft_refs);
   // If an asynchronous collection finishes, the _modUnionTable is
   // all clear.  If we are assuming the collection from an asynchronous
@@ -2998,7 +2999,7 @@
                     Mutex::_no_safepoint_check_flag);
     checkpointRootsInitialWork();
     // enable ("weak") refs discovery
-    rp->enable_discovery(true /*verify_disabled*/, true /*check_no_refs*/);
+    rp->enable_discovery();
     _collectorState = Marking;
   }
   SpecializationStats::print();
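
To make the new CMS flow easier to follow, here is a condensed sketch pieced together from the two hunks above ("..." marks elided code): when a foreground collection takes over from the background collector, the partially built discovered lists are simply abandoned, and the STW compacting collection later restarts discovery from empty lists.

  // Acquiring the collection from the background collector:
  ref_processor()->disable_discovery();               // stop concurrent discovery
  ref_processor()->abandon_partial_discovery();       // drop everything discovered so far
  ref_processor()->verify_no_references_recorded();   // the lists must now be empty
  ...
  // Later, in the compacting collection itself:
  ref_processor()->set_enqueuing_is_done(false);
  ref_processor()->enable_discovery();                // fresh, precise STW discovery
  ref_processor()->setup_policy(clear_all_soft_refs);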
--- a/hotspot/src/share/vm/gc_implementation/g1/concurrentMark.cpp	Thu Dec 18 13:30:00 2014 +0000
+++ b/hotspot/src/share/vm/gc_implementation/g1/concurrentMark.cpp	Wed Dec 17 22:32:44 2014 -0500
@@ -971,7 +971,7 @@
   // Start Concurrent Marking weak-reference discovery.
   ReferenceProcessor* rp = g1h->ref_processor_cm();
   // enable ("weak") refs discovery
-  rp->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
+  rp->enable_discovery();
   rp->setup_policy(false); // snapshot the soft ref policy to be used in this cycle
 
   SATBMarkQueueSet& satb_mq_set = JavaThread::satb_mark_queue_set();
--- a/hotspot/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp	Thu Dec 18 13:30:00 2014 +0000
+++ b/hotspot/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp	Wed Dec 17 22:32:44 2014 -0500
@@ -1301,7 +1301,7 @@
       // Temporarily clear the STW ref processor's _is_alive_non_header field.
       ReferenceProcessorIsAliveMutator stw_rp_is_alive_null(ref_processor_stw(), NULL);
 
-      ref_processor_stw()->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
+      ref_processor_stw()->enable_discovery();
       ref_processor_stw()->setup_policy(do_clear_all_soft_refs);
 
       // Do collection work
@@ -3750,8 +3750,7 @@
       // reference processing currently works in G1.
 
       // Enable discovery in the STW reference processor
-      ref_processor_stw()->enable_discovery(true /*verify_disabled*/,
-                                            true /*verify_no_refs*/);
+      ref_processor_stw()->enable_discovery();
 
       {
         // We want to temporarily turn off discovery by the
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psMarkSweep.cpp	Thu Dec 18 13:30:00 2014 +0000
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psMarkSweep.cpp	Wed Dec 17 22:32:44 2014 -0500
@@ -195,7 +195,7 @@
 
     COMPILER2_PRESENT(DerivedPointerTable::clear());
 
-    ref_processor()->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
+    ref_processor()->enable_discovery();
     ref_processor()->setup_policy(clear_all_softrefs);
 
     mark_sweep_phase1(clear_all_softrefs);
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.cpp	Thu Dec 18 13:30:00 2014 +0000
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psParallelCompact.cpp	Wed Dec 17 22:32:44 2014 -0500
@@ -2069,7 +2069,7 @@
 
     COMPILER2_PRESENT(DerivedPointerTable::clear());
 
-    ref_processor()->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
+    ref_processor()->enable_discovery();
     ref_processor()->setup_policy(maximum_heap_compaction);
 
     bool marked_for_unloading = false;
--- a/hotspot/src/share/vm/gc_implementation/parallelScavenge/psScavenge.cpp	Thu Dec 18 13:30:00 2014 +0000
+++ b/hotspot/src/share/vm/gc_implementation/parallelScavenge/psScavenge.cpp	Wed Dec 17 22:32:44 2014 -0500
@@ -362,7 +362,7 @@
 
     COMPILER2_PRESENT(DerivedPointerTable::clear());
 
-    reference_processor()->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
+    reference_processor()->enable_discovery();
     reference_processor()->setup_policy(false);
 
     // We track how much was promoted to the next generation for
--- a/hotspot/src/share/vm/memory/genCollectedHeap.cpp	Thu Dec 18 13:30:00 2014 +0000
+++ b/hotspot/src/share/vm/memory/genCollectedHeap.cpp	Wed Dec 17 22:32:44 2014 -0500
@@ -465,7 +465,7 @@
           // atomic wrt other collectors in this configuration, we
           // are guaranteed to have empty discovered ref lists.
           if (rp->discovery_is_atomic()) {
-            rp->enable_discovery(true /*verify_disabled*/, true /*verify_no_refs*/);
+            rp->enable_discovery();
             rp->setup_policy(do_clear_all_soft_refs);
           } else {
             // collect() below will enable discovery as appropriate
--- a/hotspot/src/share/vm/memory/referenceProcessor.cpp	Thu Dec 18 13:30:00 2014 +0000
+++ b/hotspot/src/share/vm/memory/referenceProcessor.cpp	Wed Dec 17 22:32:44 2014 -0500
@@ -68,10 +68,10 @@
   _pending_list_uses_discovered_field = JDK_Version::current().pending_list_uses_discovered_field();
 }
 
-void ReferenceProcessor::enable_discovery(bool verify_disabled, bool check_no_refs) {
+void ReferenceProcessor::enable_discovery(bool check_no_refs) {
 #ifdef ASSERT
   // Verify that we're not currently discovering refs
-  assert(!verify_disabled || !_discovering_refs, "nested call?");
+  assert(!_discovering_refs, "nested call?");
 
   if (check_no_refs) {
     // Verify that the discovered lists are empty
@@ -963,52 +963,6 @@
   return total_list_count;
 }
 
-void ReferenceProcessor::clean_up_discovered_references() {
-  // loop over the lists
-  for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
-    if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
-      gclog_or_tty->print_cr(
-        "\nScrubbing %s discovered list of Null referents",
-        list_name(i));
-    }
-    clean_up_discovered_reflist(_discovered_refs[i]);
-  }
-}
-
-void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
-  assert(!discovery_is_atomic(), "Else why call this method?");
-  DiscoveredListIterator iter(refs_list, NULL, NULL);
-  while (iter.has_next()) {
-    iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
-    oop next = java_lang_ref_Reference::next(iter.obj());
-    assert(next->is_oop_or_null(), err_msg("Expected an oop or NULL for next field at " PTR_FORMAT, p2i(next)));
-    // If referent has been cleared or Reference is not active,
-    // drop it.
-    if (iter.referent() == NULL || next != NULL) {
-      debug_only(
-        if (PrintGCDetails && TraceReferenceGC) {
-          gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
-            INTPTR_FORMAT " with next field: " INTPTR_FORMAT
-            " and referent: " INTPTR_FORMAT,
-            (void *)iter.obj(), (void *)next, (void *)iter.referent());
-        }
-      )
-      // Remove Reference object from list
-      iter.remove();
-      iter.move_to_next();
-    } else {
-      iter.next();
-    }
-  }
-  NOT_PRODUCT(
-    if (PrintGCDetails && TraceReferenceGC) {
-      gclog_or_tty->print(
-        " Removed %d Refs with NULL referents out of %d discovered Refs",
-        iter.removed(), iter.processed());
-    }
-  )
-}
-
 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
   uint id = 0;
   // Determine the queue index to use for this object.
--- a/hotspot/src/share/vm/memory/referenceProcessor.hpp	Thu Dec 18 13:30:00 2014 +0000
+++ b/hotspot/src/share/vm/memory/referenceProcessor.hpp	Wed Dec 17 22:32:44 2014 -0500
@@ -353,19 +353,6 @@
                                       GCTimer*           gc_timer,
                                       GCId               gc_id);
 
-  // Delete entries in the discovered lists that have
-  // either a null referent or are not active. Such
-  // Reference objects can result from the clearing
-  // or enqueueing of Reference objects concurrent
-  // with their discovery by a (concurrent) collector.
-  // For a definition of "active" see java.lang.ref.Reference;
-  // Refs are born active, become inactive when enqueued,
-  // and never become active again. The state of being
-  // active is encoded as follows: A Ref is active
-  // if and only if its "next" field is NULL.
-  void clean_up_discovered_references();
-  void clean_up_discovered_reflist(DiscoveredList& refs_list);
-
   // Returns the name of the discovered reference list
   // occupying the i / _num_q slot.
   const char* list_name(uint i);
@@ -439,7 +426,7 @@
   void      set_span(MemRegion span) { _span = span; }
 
   // start and stop weak ref discovery
-  void enable_discovery(bool verify_disabled, bool check_no_refs);
+  void enable_discovery(bool check_no_refs = true);
   void disable_discovery()  { _discovering_refs = false; }
   bool discovery_enabled()  { return _discovering_refs;  }
 
@@ -517,7 +504,7 @@
 
   ~NoRefDiscovery() {
     if (_was_discovering_refs) {
-      _rp->enable_discovery(true /*verify_disabled*/, false /*check_no_refs*/);
+      _rp->enable_discovery(false /*check_no_refs*/);
     }
   }
 };
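
For reference, the rule the deleted cleanup applied can be restated as a single predicate; this is a condensed paraphrase of the removed clean_up_discovered_reflist() above, relying on the java.lang.ref.Reference encoding that a Reference is active iff its next field is NULL:

  // The old cleanup dropped a discovered Reference if its referent had been
  // cleared concurrently, or it had been enqueued (made inactive)
  // concurrently by a mutator:
  bool drop = (iter.referent() == NULL) ||
              (java_lang_ref_Reference::next(iter.obj()) != NULL);

Such entries can only arise under non-atomic (concurrent) discovery, and the old code asserted it was only called in that case. With its sole caller, the CMS foreground path, now abandoning partial discovery and letting the STW compacting collector redo discovery from empty lists, the cleanup has no remaining users and is removed.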