hotspot/src/share/vm/memory/referenceProcessor.cpp
/*
 * Copyright 2001-2007 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

# include "incls/_precompiled.incl"
# include "incls/_referenceProcessor.cpp.incl"

// List of discovered references.
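// Each list is singly linked through the Reference.discovered field and is
// terminated by ReferenceProcessor::_sentinelRef rather than by NULL, so a
// list is empty when its head is the sentinel itself (see empty() below).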
class DiscoveredList {
public:
  DiscoveredList() : _head(NULL), _len(0) { }
  oop    head() const           { return _head; }
  oop*   head_ptr()             { return &_head; }
  void   set_head(oop o)        { _head = o; }
  bool   empty() const          { return _head == ReferenceProcessor::_sentinelRef; }
  size_t length()               { return _len; }
  void   set_length(size_t len) { _len = len; }
private:
  size_t _len;
  oop    _head;
};

oop  ReferenceProcessor::_sentinelRef = NULL;

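// The number of discovered-reference list kinds (soft, weak, final and
// phantom), derived from the ordering of the ReferenceType enum.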
const int subclasses_of_ref = REF_PHANTOM - REF_OTHER;

void referenceProcessor_init() {
  ReferenceProcessor::init_statics();
}

void ReferenceProcessor::init_statics() {
  assert(_sentinelRef == NULL, "should be initialized precisely once");
  EXCEPTION_MARK;
  _sentinelRef = instanceKlass::cast(
                   SystemDictionary::object_klass())->
                     allocate_permanent_instance(THREAD);

  // Initialize the master soft ref clock.
  java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());

  if (HAS_PENDING_EXCEPTION) {
      Handle ex(THREAD, PENDING_EXCEPTION);
      vm_exit_during_initialization(ex);
  }
  assert(_sentinelRef != NULL && _sentinelRef->is_oop(),
         "Just constructed it!");
  guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
            RefDiscoveryPolicy == ReferentBasedDiscovery,
            "Unrecognized RefDiscoveryPolicy");
}


ReferenceProcessor* ReferenceProcessor::create_ref_processor(
    MemRegion          span,
    bool               atomic_discovery,
    bool               mt_discovery,
    BoolObjectClosure* is_alive_non_header,
    int                parallel_gc_threads,
    bool               mt_processing)
{
  int mt_degree = 1;
  if (parallel_gc_threads > 1) {
    mt_degree = parallel_gc_threads;
  }
  ReferenceProcessor* rp =
    new ReferenceProcessor(span, atomic_discovery,
                           mt_discovery, mt_degree,
                           mt_processing);
  if (rp == NULL) {
    vm_exit_during_initialization("Could not allocate ReferenceProcessor object");
  }
  rp->set_is_alive_non_header(is_alive_non_header);
  return rp;
}
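// Note: callers supply the span they collect and their worker thread count;
// a collector running with parallel_gc_threads <= 1 gets mt_degree == 1 and
// hence a single discovered list per reference kind.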


ReferenceProcessor::ReferenceProcessor(MemRegion span,
  bool atomic_discovery, bool mt_discovery, int mt_degree,
  bool mt_processing) :
  _discovering_refs(false),
  _enqueuing_is_done(false),
  _is_alive_non_header(NULL),
  _processing_is_mt(mt_processing),
  _next_id(0)
{
  _span = span;
  _discovery_is_atomic = atomic_discovery;
  _discovery_is_mt     = mt_discovery;
  _num_q               = mt_degree;
  _discoveredSoftRefs  = NEW_C_HEAP_ARRAY(DiscoveredList, _num_q * subclasses_of_ref);
  if (_discoveredSoftRefs == NULL) {
    vm_exit_during_initialization("Could not allocate RefProc Array");
  }
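  // The single allocation above is laid out as subclasses_of_ref contiguous
  // segments of _num_q lists each, in the order soft, weak, final, phantom;
  // the pointers below are simply offsets into it.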
  _discoveredWeakRefs    = &_discoveredSoftRefs[_num_q];
  _discoveredFinalRefs   = &_discoveredWeakRefs[_num_q];
  _discoveredPhantomRefs = &_discoveredFinalRefs[_num_q];
  assert(_sentinelRef != NULL, "_sentinelRef is NULL");
  // Initialize all entries to _sentinelRef
  for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
    _discoveredSoftRefs[i].set_head(_sentinelRef);
    _discoveredSoftRefs[i].set_length(0);
  }
}

#ifndef PRODUCT
void ReferenceProcessor::verify_no_references_recorded() {
  guarantee(!_discovering_refs, "Discovering refs?");
  for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
    guarantee(_discoveredSoftRefs[i].empty(),
              "Found non-empty discovered list");
  }
}
#endif

void ReferenceProcessor::weak_oops_do(OopClosure* f) {
  for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
    f->do_oop(_discoveredSoftRefs[i].head_ptr());
  }
}

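// Visit the root of the sentinel itself, keeping it alive and allowing a
// moving collector to update _sentinelRef if the sentinel object is moved.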
void ReferenceProcessor::oops_do(OopClosure* f) {
  f->do_oop(&_sentinelRef);
}

void ReferenceProcessor::update_soft_ref_master_clock()
{
  // Update (advance) the soft ref master clock field. This must be done
  // after processing the soft ref list.
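  // The clock is read by the soft reference clearing policy, which compares
  // it against each SoftReference's timestamp field to estimate how long a
  // referent has gone unreferenced.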
  jlong now = os::javaTimeMillis();
  jlong clock = java_lang_ref_SoftReference::clock();
  NOT_PRODUCT(
  if (now < clock) {
    warning("time warp: " INT64_FORMAT " to " INT64_FORMAT, clock, now);
  }
  )
  // In product mode, protect ourselves from system time being adjusted
  // externally and going backward; see note in the implementation of
  // GenCollectedHeap::time_since_last_gc() for the right way to fix
  // this uniformly throughout the VM; see bug-id 4741166. XXX
  if (now > clock) {
    java_lang_ref_SoftReference::set_clock(now);
  }
  // Else leave clock stalled at its old value until time progresses
  // past clock value.
}


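// Process the discovered lists in order: soft references (subject to the
// given policy), then weak, final and phantom references, and finally the
// JNI weak global refs (see the note below on why those must come last).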
void
ReferenceProcessor::process_discovered_references(
  ReferencePolicy*             policy,
  BoolObjectClosure*           is_alive,
  OopClosure*                  keep_alive,
  VoidClosure*                 complete_gc,
  AbstractRefProcTaskExecutor* task_executor) {
  NOT_PRODUCT(verify_ok_to_handle_reflists());

  assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
  // Stop treating discovered references specially.
  disable_discovery();

  bool trace_time = PrintGCDetails && PrintReferenceGC;
  // Soft references
  {
    TraceTime tt("SoftReference", trace_time, false, gclog_or_tty);
    process_discovered_reflist(_discoveredSoftRefs, policy, true,
                               is_alive, keep_alive, complete_gc, task_executor);
  }

  update_soft_ref_master_clock();

  // Weak references
  {
    TraceTime tt("WeakReference", trace_time, false, gclog_or_tty);
    process_discovered_reflist(_discoveredWeakRefs, NULL, true,
                               is_alive, keep_alive, complete_gc, task_executor);
  }

  // Final references
  {
    TraceTime tt("FinalReference", trace_time, false, gclog_or_tty);
    process_discovered_reflist(_discoveredFinalRefs, NULL, false,
                               is_alive, keep_alive, complete_gc, task_executor);
  }

  // Phantom references
  {
    TraceTime tt("PhantomReference", trace_time, false, gclog_or_tty);
    process_discovered_reflist(_discoveredPhantomRefs, NULL, false,
                               is_alive, keep_alive, complete_gc, task_executor);
  }

  // Weak global JNI references. It would make more sense (semantically) to
  // traverse these simultaneously with the regular weak references above, but
  // that is not what the JDK 1.2 specification calls for. See #4126360. Native
  // code can thus use JNI weak references to circumvent the phantom references
  // and resurrect a "post-mortem" object.
  {
    TraceTime tt("JNI Weak Reference", trace_time, false, gclog_or_tty);
    if (task_executor != NULL) {
      task_executor->set_single_threaded_mode();
    }
    process_phaseJNI(is_alive, keep_alive, complete_gc);
  }
}


#ifndef PRODUCT
// Calculate the number of jni handles.
unsigned int ReferenceProcessor::count_jni_refs()
{
  class AlwaysAliveClosure: public BoolObjectClosure {
  public:
    bool do_object_b(oop obj) { return true; }
    void do_object(oop obj) { assert(false, "Don't call"); }
  };

  class CountHandleClosure: public OopClosure {
  private:
    int _count;
  public:
    CountHandleClosure(): _count(0) {}
    void do_oop(oop* unused) {
      _count++;
    }
    int count() { return _count; }
  };
  CountHandleClosure global_handle_count;
  AlwaysAliveClosure always_alive;
  JNIHandles::weak_oops_do(&always_alive, &global_handle_count);
  return global_handle_count.count();
}
#endif

void ReferenceProcessor::process_phaseJNI(BoolObjectClosure* is_alive,
                                          OopClosure*        keep_alive,
                                          VoidClosure*       complete_gc) {
#ifndef PRODUCT
  if (PrintGCDetails && PrintReferenceGC) {
    unsigned int count = count_jni_refs();
    gclog_or_tty->print(", %u refs", count);
  }
#endif
  JNIHandles::weak_oops_do(is_alive, keep_alive);
  // Finally remember to keep sentinel around
  keep_alive->do_oop(&_sentinelRef);
  complete_gc->do_void();
}

bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
  NOT_PRODUCT(verify_ok_to_handle_reflists());
  // Remember old value of pending references list
  oop* pending_list_addr = java_lang_ref_Reference::pending_list_addr();
  oop old_pending_list_value = *pending_list_addr;

  // Enqueue references that are not made active again, and
  // clear the decks for the next collection (cycle).
  enqueue_discovered_reflists(pending_list_addr, task_executor);
  // Do the oop-check on pending_list_addr missed in
  // enqueue_discovered_reflist. We should probably
  // do a raw oop_check so that future such idempotent
  // oop_stores relying on the oop-check side-effect
  // may be elided automatically and safely without
  // affecting correctness.
  oop_store(pending_list_addr, *(pending_list_addr));

  // Stop treating discovered references specially.
  disable_discovery();

  // Return true if new pending references were added
  return old_pending_list_value != *pending_list_addr;
}

void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
  oop* pending_list_addr) {
  // Given a list of refs linked through the "discovered" field
  // (java.lang.ref.Reference.discovered), chain them through the
  // "next" field (java.lang.ref.Reference.next) and prepend
  // to the pending list.
  if (TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
                           INTPTR_FORMAT, (address)refs_list.head());
  }
  oop obj = refs_list.head();
  // Walk down the list, copying the discovered field into
  // the next field and clearing it (except for the last
  // non-sentinel object, which is treated specially to avoid
  // confusion with an active reference).
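  // Invariant: an active Reference has next == NULL. Once on the pending
  // list, a Reference is linked through next, and the last element points
  // to itself so that it remains distinguishable from an active Reference.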
  while (obj != _sentinelRef) {
    assert(obj->is_instanceRef(), "should be reference object");
    oop next = java_lang_ref_Reference::discovered(obj);
    if (TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("  obj " INTPTR_FORMAT "/next " INTPTR_FORMAT,
                             (oopDesc*) obj, (oopDesc*) next);
    }
    assert(*java_lang_ref_Reference::next_addr(obj) == NULL,
      "The reference should not be enqueued");
    if (next == _sentinelRef) {  // obj is last
      // Swap refs_list into pending_list_addr and
      // set obj's next to what we read from pending_list_addr.
      oop old = (oop)Atomic::xchg_ptr(refs_list.head(), pending_list_addr);
      // Need oop_check on pending_list_addr above;
      // see special oop-check code at the end of
      // enqueue_discovered_reflists() further below.
      if (old == NULL) {
        // obj should be made to point to itself, since
        // pending list was empty.
        java_lang_ref_Reference::set_next(obj, obj);
      } else {
        java_lang_ref_Reference::set_next(obj, old);
      }
    } else {
      java_lang_ref_Reference::set_next(obj, next);
    }
    java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
    obj = next;
  }
}

// Parallel enqueue task
class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
public:
  RefProcEnqueueTask(ReferenceProcessor& ref_processor,
                     DiscoveredList      discovered_refs[],
                     oop*                pending_list_addr,
                     oop                 sentinel_ref,
                     int                 n_queues)
    : EnqueueTask(ref_processor, discovered_refs,
                  pending_list_addr, sentinel_ref, n_queues)
  { }

  virtual void work(unsigned int work_id)
  {
    assert(work_id < (unsigned int)_ref_processor.num_q(), "Index out-of-bounds");
    // Simplest first cut: static partitioning.
    int index = work_id;
    for (int j = 0; j < subclasses_of_ref; j++, index += _n_queues) {
      _ref_processor.enqueue_discovered_reflist(
        _refs_lists[index], _pending_list_addr);
      _refs_lists[index].set_head(_sentinel_ref);
      _refs_lists[index].set_length(0);
    }
  }
};

// Enqueue references that are not made active again
void ReferenceProcessor::enqueue_discovered_reflists(oop* pending_list_addr,
  AbstractRefProcTaskExecutor* task_executor) {
  if (_processing_is_mt && task_executor != NULL) {
    // Parallel code
    RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
                           pending_list_addr, _sentinelRef, _num_q);
    task_executor->execute(tsk);
  } else {
    // Serial code: enqueue each list in turn.
    for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
      enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
      _discoveredSoftRefs[i].set_head(_sentinelRef);
      _discoveredSoftRefs[i].set_length(0);
    }
  }
}

// Iterator for the list of discovered references.
class DiscoveredListIterator {
public:
  inline DiscoveredListIterator(DiscoveredList&    refs_list,
                                OopClosure*        keep_alive,
                                BoolObjectClosure* is_alive);

  // True as long as the iterator has not reached the end of the list.
  inline bool has_next() const
  { return _next != ReferenceProcessor::_sentinelRef; }

  // Get oop to the Reference object.
  inline oop  obj() const { return _ref; }

  // Get oop to the referent object.
  inline oop  referent() const { return _referent; }

  // Returns true if referent is alive.
  inline bool is_referent_alive() const;

  // Loads data for the current reference.
  // The "allow_null_referent" argument tells us to allow for the possibility
  // of a NULL referent in the discovered Reference object. This typically
  // happens in the case of concurrent collectors that may have done the
  // discovery concurrently with, or interleaved with, mutator execution.
  inline void load_ptrs(DEBUG_ONLY(bool allow_null_referent));

  // Move to the next discovered reference.
  inline void next();

  // Remove the current reference from the list and move to the next.
  inline void remove();

  // Make the Reference object active again.
  inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }

  // Make the referent alive.
  inline void make_referent_alive() { _keep_alive->do_oop(_referent_addr); }

  // Update the discovered field.
  inline void update_discovered() { _keep_alive->do_oop(_prev_next); }

  // NULL out referent pointer.
  inline void clear_referent() { *_referent_addr = NULL; }

  // Statistics
  NOT_PRODUCT(
  inline size_t processed() const { return _processed; }
  inline size_t removed() const   { return _removed; }
  )

private:
  inline void move_to_next();

private:
  DiscoveredList&    _refs_list;
  oop*               _prev_next;
  oop                _ref;
  oop*               _discovered_addr;
  oop                _next;
  oop*               _referent_addr;
  oop                _referent;
  OopClosure*        _keep_alive;
  BoolObjectClosure* _is_alive;
  DEBUG_ONLY(
  oop                _first_seen; // cyclic linked list check
  )
  NOT_PRODUCT(
  size_t             _processed;
  size_t             _removed;
  )
};

inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList&    refs_list,
                                                      OopClosure*        keep_alive,
                                                      BoolObjectClosure* is_alive)
  : _refs_list(refs_list),
    _prev_next(refs_list.head_ptr()),
    _ref(refs_list.head()),
#ifdef ASSERT
    _first_seen(refs_list.head()),
#endif
#ifndef PRODUCT
    _processed(0),
    _removed(0),
#endif
    _next(refs_list.head()),
    _keep_alive(keep_alive),
    _is_alive(is_alive)
{ }

inline bool DiscoveredListIterator::is_referent_alive() const
{
  return _is_alive->do_object_b(_referent);
}

inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent))
{
  _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
  assert(_discovered_addr && (*_discovered_addr)->is_oop_or_null(),
         "discovered field is bad");
  _next = *_discovered_addr;
  _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
  _referent = *_referent_addr;
  assert(Universe::heap()->is_in_reserved_or_null(_referent),
         "Wrong oop found in java.lang.Reference object");
  assert(allow_null_referent ?
             _referent->is_oop_or_null()
           : _referent->is_oop(),
         "bad referent");
}

inline void DiscoveredListIterator::next()
{
  _prev_next = _discovered_addr;
  move_to_next();
}

inline void DiscoveredListIterator::remove()
{
  assert(_ref->is_oop(), "Dropping a bad reference");
  // Clear the discovered_addr field so that the object does
  // not look like it has been discovered.
  *_discovered_addr = NULL;
  // Remove Reference object from list.
  *_prev_next = _next;
  NOT_PRODUCT(_removed++);
  move_to_next();
}

inline void DiscoveredListIterator::move_to_next()
{
  _ref = _next;
  assert(_ref != _first_seen, "cyclic ref_list found");
  NOT_PRODUCT(_processed++);
}


// NOTE: process_phase*() are largely similar, and at a high level
// merely iterate over the extant list applying a predicate to
// each of its elements and possibly removing that element from the
// list and applying some further closures to that element.
// We should consider the possibility of replacing these
// process_phase*() methods by abstracting them into
// a single general iterator invocation that receives appropriate
// closures that accomplish this work.

// (SoftReferences only) Traverse the list and remove any SoftReferences whose
// referents are not alive, but that should be kept alive for policy reasons.
// Keep alive the transitive closure of all such referents.
void
ReferenceProcessor::process_phase1(DiscoveredList&    refs_list_addr,
                                   ReferencePolicy*   policy,
                                   BoolObjectClosure* is_alive,
                                   OopClosure*        keep_alive,
                                   VoidClosure*       complete_gc) {
  assert(policy != NULL, "Must have a non-NULL policy");
  DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive);
  // Decide which softly reachable refs should be kept alive.
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
    bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
    if (referent_is_dead && !policy->should_clear_reference(iter.obj())) {
      if (TraceReferenceGC) {
        gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s) by policy",
                               (address)iter.obj(), iter.obj()->blueprint()->internal_name());
      }
      // Make the Reference object active again
      iter.make_active();
      // keep the referent around
      iter.make_referent_alive();
      // Remove Reference object from list
      iter.remove();
    } else {
      iter.next();
    }
  }
  // Close the reachable set
  complete_gc->do_void();
  NOT_PRODUCT(
    if (PrintGCDetails && TraceReferenceGC) {
      gclog_or_tty->print(" Dropped " SIZE_FORMAT " dead Refs out of " SIZE_FORMAT
        " discovered Refs by policy ", iter.removed(), iter.processed());
    }
  )
}

// Traverse the list and remove any Refs that are not active, or
// whose referents are either alive or NULL.
void
ReferenceProcessor::pp2_work(DiscoveredList&    refs_list_addr,
                             BoolObjectClosure* is_alive,
                             OopClosure*        keep_alive)
{
  assert(discovery_is_atomic(), "Error");
  DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
    DEBUG_ONLY(oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj());)
    assert(*next_addr == NULL, "Should not discover inactive Reference");
    if (iter.is_referent_alive()) {
      if (TraceReferenceGC) {
        gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
                               (address)iter.obj(), iter.obj()->blueprint()->internal_name());
      }
      // The referent is reachable after all.
      // Update the referent pointer as necessary: Note that this
      // should not entail any recursive marking because the
      // referent must already have been traversed.
      iter.make_referent_alive();
      // Remove Reference object from list
      iter.remove();
    } else {
      iter.next();
    }
  }
  NOT_PRODUCT(
    if (PrintGCDetails && TraceReferenceGC) {
      gclog_or_tty->print(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
        " Refs in discovered list ", iter.removed(), iter.processed());
    }
  )
}

void
ReferenceProcessor::pp2_work_concurrent_discovery(
  DiscoveredList&    refs_list_addr,
  BoolObjectClosure* is_alive,
  OopClosure*        keep_alive,
  VoidClosure*       complete_gc)
{
  assert(!discovery_is_atomic(), "Error");
  DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
    oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
    if (iter.referent() == NULL || iter.is_referent_alive() ||
        *next_addr != NULL) {
      assert((*next_addr)->is_oop_or_null(), "bad next field");
      // Remove Reference object from list
      iter.remove();
      // Trace the cohorts
      iter.make_referent_alive();
      keep_alive->do_oop(next_addr);
    } else {
      iter.next();
    }
  }
  // Now close the newly reachable set
  complete_gc->do_void();
  NOT_PRODUCT(
    if (PrintGCDetails && TraceReferenceGC) {
      gclog_or_tty->print(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
        " Refs in discovered list ", iter.removed(), iter.processed());
    }
  )
}

// Traverse the list and process the referents, by either clearing them
// or keeping them (and their reachable closure) alive.
void
ReferenceProcessor::process_phase3(DiscoveredList&    refs_list_addr,
                                   bool               clear_referent,
                                   BoolObjectClosure* is_alive,
                                   OopClosure*        keep_alive,
                                   VoidClosure*       complete_gc) {
  DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive);
  while (iter.has_next()) {
    iter.update_discovered();
    iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
    if (clear_referent) {
      // NULL out referent pointer
      iter.clear_referent();
    } else {
      // keep the referent around
      iter.make_referent_alive();
    }
    if (TraceReferenceGC) {
      gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
                             clear_referent ? "cleared " : "",
                             (address)iter.obj(), iter.obj()->blueprint()->internal_name());
    }
    assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
    // If discovery is concurrent, we may have objects with null referents,
    // being those that were concurrently cleared after they were discovered
    // (and not subsequently precleaned).
    assert(   (discovery_is_atomic() && iter.referent()->is_oop())
           || (!discovery_is_atomic() && iter.referent()->is_oop_or_null(UseConcMarkSweepGC)),
           "Adding a bad referent");
    iter.next();
  }
  // Remember to keep sentinel pointer around
  iter.update_discovered();
  // Close the reachable set
  complete_gc->do_void();
}

void
ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& ref_list) {
  oop obj = ref_list.head();
  while (obj != _sentinelRef) {
    oop* discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
    obj = *discovered_addr;
    *discovered_addr = NULL;
  }
  ref_list.set_head(_sentinelRef);
  ref_list.set_length(0);
}

void
ReferenceProcessor::abandon_partial_discovered_list_arr(DiscoveredList refs_lists[]) {
  for (int i = 0; i < _num_q; i++) {
    abandon_partial_discovered_list(refs_lists[i]);
  }
}

class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
public:
  RefProcPhase1Task(ReferenceProcessor& ref_processor,
                    DiscoveredList      refs_lists[],
                    ReferencePolicy*    policy,
                    bool                marks_oops_alive)
    : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
      _policy(policy)
  { }
  virtual void work(unsigned int i, BoolObjectClosure& is_alive,
                    OopClosure& keep_alive,
                    VoidClosure& complete_gc)
  {
    _ref_processor.process_phase1(_refs_lists[i], _policy,
                                  &is_alive, &keep_alive, &complete_gc);
  }
private:
  ReferencePolicy* _policy;
};

class RefProcPhase2Task: public AbstractRefProcTaskExecutor::ProcessTask {
public:
  RefProcPhase2Task(ReferenceProcessor& ref_processor,
                    DiscoveredList      refs_lists[],
                    bool                marks_oops_alive)
    : ProcessTask(ref_processor, refs_lists, marks_oops_alive)
  { }
  virtual void work(unsigned int i, BoolObjectClosure& is_alive,
                    OopClosure& keep_alive,
                    VoidClosure& complete_gc)
  {
    _ref_processor.process_phase2(_refs_lists[i],
                                  &is_alive, &keep_alive, &complete_gc);
  }
};

class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
public:
  RefProcPhase3Task(ReferenceProcessor& ref_processor,
                    DiscoveredList      refs_lists[],
                    bool                clear_referent,
                    bool                marks_oops_alive)
    : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
      _clear_referent(clear_referent)
  { }
  virtual void work(unsigned int i, BoolObjectClosure& is_alive,
                    OopClosure& keep_alive,
                    VoidClosure& complete_gc)
  {
    _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
                                  &is_alive, &keep_alive, &complete_gc);
  }
private:
  bool _clear_referent;
};

// Balances reference queues.
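// No queue is left holding more than total/_num_q + 1 references. For
// example, with _num_q == 4 and queue lengths {6, 0, 2, 0}, avg_refs is
// 8/4 + 1 == 3, so three references move from queue 0 to queue 1,
// yielding {3, 3, 2, 0}.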
void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
{
  // calculate total length
  size_t total_refs = 0;
  for (int i = 0; i < _num_q; ++i) {
    total_refs += ref_lists[i].length();
  }
  size_t avg_refs = total_refs / _num_q + 1;
  int to_idx = 0;
  for (int from_idx = 0; from_idx < _num_q; from_idx++) {
    while (ref_lists[from_idx].length() > avg_refs) {
      assert(to_idx < _num_q, "Sanity Check!");
      if (ref_lists[to_idx].length() < avg_refs) {
        // move superfluous refs
        size_t refs_to_move =
          MIN2(ref_lists[from_idx].length() - avg_refs,
               avg_refs - ref_lists[to_idx].length());
        oop move_head = ref_lists[from_idx].head();
        oop move_tail = move_head;
        oop new_head  = move_head;
        // find an element to split the list on
        for (size_t j = 0; j < refs_to_move; ++j) {
          move_tail = new_head;
          new_head = *java_lang_ref_Reference::discovered_addr(new_head);
        }
        java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
        ref_lists[to_idx].set_head(move_head);
        ref_lists[to_idx].set_length(ref_lists[to_idx].length() + refs_to_move);
        ref_lists[from_idx].set_head(new_head);
        ref_lists[from_idx].set_length(ref_lists[from_idx].length() - refs_to_move);
      } else {
        ++to_idx;
      }
    }
  }
}

void
ReferenceProcessor::process_discovered_reflist(
  DiscoveredList               refs_lists[],
  ReferencePolicy*             policy,
  bool                         clear_referent,
  BoolObjectClosure*           is_alive,
  OopClosure*                  keep_alive,
  VoidClosure*                 complete_gc,
  AbstractRefProcTaskExecutor* task_executor)
{
  bool mt = task_executor != NULL && _processing_is_mt;
  if (mt && ParallelRefProcBalancingEnabled) {
    balance_queues(refs_lists);
  }
  if (PrintReferenceGC && PrintGCDetails) {
    size_t total = 0;
    for (int i = 0; i < _num_q; ++i) {
      total += refs_lists[i].length();
    }
    gclog_or_tty->print(", " SIZE_FORMAT " refs", total);
  }

  // Phase 1 (soft refs only):
  // . Traverse the list and remove any SoftReferences whose
  //   referents are not alive, but that should be kept alive for
  //   policy reasons. Keep alive the transitive closure of all
  //   such referents.
  if (policy != NULL) {
    if (mt) {
      RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/);
      task_executor->execute(phase1);
    } else {
      for (int i = 0; i < _num_q; i++) {
        process_phase1(refs_lists[i], policy,
                       is_alive, keep_alive, complete_gc);
      }
    }
  } else { // policy == NULL
    assert(refs_lists != _discoveredSoftRefs,
           "Policy must be specified for soft references.");
  }

  // Phase 2:
  // . Traverse the list and remove any refs whose referents are alive.
  if (mt) {
    RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/);
    task_executor->execute(phase2);
  } else {
    for (int i = 0; i < _num_q; i++) {
      process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
    }
  }

  // Phase 3:
  // . Traverse the list and process referents as appropriate.
  if (mt) {
    RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/);
    task_executor->execute(phase3);
  } else {
    for (int i = 0; i < _num_q; i++) {
      process_phase3(refs_lists[i], clear_referent,
                     is_alive, keep_alive, complete_gc);
    }
  }
}

void ReferenceProcessor::clean_up_discovered_references() {
  // loop over the lists
  for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
    if (TraceReferenceGC && PrintGCDetails && ((i % _num_q) == 0)) {
      gclog_or_tty->print_cr(
        "\nScrubbing %s discovered list of NULL referents",
        list_name(i));
    }
    clean_up_discovered_reflist(_discoveredSoftRefs[i]);
  }
}

void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
  assert(!discovery_is_atomic(), "Else why call this method?");
  DiscoveredListIterator iter(refs_list, NULL, NULL);
  size_t length = refs_list.length();
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
    oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
    assert((*next_addr)->is_oop_or_null(), "bad next field");
    // If referent has been cleared or Reference is not active,
    // drop it.
    if (iter.referent() == NULL || *next_addr != NULL) {
      debug_only(
        if (PrintGCDetails && TraceReferenceGC) {
          gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
            INTPTR_FORMAT " with next field: " INTPTR_FORMAT
            " and referent: " INTPTR_FORMAT,
            (address)iter.obj(), (address)*next_addr, (address)iter.referent());
        }
      )
      // Remove Reference object from list
      iter.remove();
      --length;
    } else {
      iter.next();
    }
  }
  refs_list.set_length(length);
  NOT_PRODUCT(
    if (PrintGCDetails && TraceReferenceGC) {
      gclog_or_tty->print(
        " Removed " SIZE_FORMAT " Refs with NULL referents out of " SIZE_FORMAT
        " discovered Refs", iter.removed(), iter.processed());
    }
  )
}

inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
  int id = 0;
  // Determine the queue index to use for this object.
  if (_discovery_is_mt) {
    // During a multi-threaded discovery phase,
    // each thread saves to its "own" list.
    Thread* thr = Thread::current();
    assert(thr->is_GC_task_thread(),
           "Dubious cast from Thread* to WorkerThread*?");
    id = ((WorkerThread*)thr)->id();
  } else {
    // single-threaded discovery, we save in round-robin
    // fashion to each of the lists.
    if (_processing_is_mt) {
      id = next_id();
    }
  }
  assert(0 <= id && id < _num_q, "Id is out-of-bounds (call Freud?)");

  // Get the discovered queue to which we will add
  DiscoveredList* list = NULL;
  switch (rt) {
    case REF_OTHER:
      // Unknown reference type, no special treatment
      break;
    case REF_SOFT:
      list = &_discoveredSoftRefs[id];
      break;
    case REF_WEAK:
      list = &_discoveredWeakRefs[id];
      break;
    case REF_FINAL:
      list = &_discoveredFinalRefs[id];
      break;
    case REF_PHANTOM:
      list = &_discoveredPhantomRefs[id];
      break;
    case REF_NONE:
      // we should not reach here if we are an instanceRefKlass
    default:
      ShouldNotReachHere();
  }
  return list;
}

inline void ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& list,
  oop obj, oop* discovered_addr) {
  assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
  // First we must make sure this object is only enqueued once: CAS a
  // non-NULL value into the discovered field.
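  // Atomic::cmpxchg_ptr stores the current list head into the discovered
  // field only if that field is still NULL, returning the field's previous
  // value; a NULL result therefore means this thread won the race.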
  oop retest = (oop)Atomic::cmpxchg_ptr(list.head(), discovered_addr, NULL);
  if (retest == NULL) {
    // This thread just won the right to enqueue the object.
    // We have separate lists for enqueueing so no synchronization
    // is necessary.
    list.set_head(obj);
    list.set_length(list.length() + 1);
  } else {
    // If retest was non-NULL, another thread beat us to it:
    // the reference has already been discovered...
    if (TraceReferenceGC) {
      gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)",
                             (oopDesc*)obj, obj->blueprint()->internal_name());
    }
  }
}


// We mention two of several possible choices here:
// #0: if the reference object is not in the "originating generation"
//     (or part of the heap being collected, indicated by our "span"),
//     we don't treat it specially (i.e. we scan it as we would
//     a normal oop, treating its references as strong references).
//     This means that references can't be enqueued unless their
//     referent is also in the same span. This is the simplest,
//     most "local" and most conservative approach, albeit one
//     that may cause weak references to be enqueued least promptly.
//     We call this choice the "ReferenceBasedDiscovery" policy.
// #1: the reference object may be in any generation (span), but if
//     the referent is in the generation (span) being currently collected
//     then we can discover the reference object, provided
//     the object has not already been discovered by
//     a different concurrently running collector (as may be the
//     case, for instance, if the reference object is in CMS and
//     the referent in DefNewGeneration), and provided the processing
//     of this reference object by the current collector will
//     appear atomic to every other collector in the system.
//     (Thus, for instance, a concurrent collector may not
//     discover references in other generations even if the
//     referent is in its own generation). This policy may,
//     in certain cases, enqueue references somewhat sooner than
//     might Policy #0 above, but at marginally increased cost
//     and complexity in processing these references.
//     We call this choice the "ReferentBasedDiscovery" policy.
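// For example, under ReferenceBasedDiscovery a young collection whose span
// excludes the old generation scans an old-generation Reference like any
// other object, so its referent is treated as strongly reachable for the
// duration of that collection.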
  1002 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
       
  1003   // We enqueue references only if we are discovering refs
       
  1004   // (rather than processing discovered refs).
       
  1005   if (!_discovering_refs || !RegisterReferences) {
       
  1006     return false;
       
  1007   }
       
  1008   // We only enqueue active references.
       
  1009   oop* next_addr = java_lang_ref_Reference::next_addr(obj);
       
  1010   if (*next_addr != NULL) {
       
  1011     return false;
       
  1012   }
       
  1013 
       
  1014   HeapWord* obj_addr = (HeapWord*)obj;
       
  1015   if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
       
  1016       !_span.contains(obj_addr)) {
       
  1017     // Reference is not in the originating generation;
       
  1018     // don't treat it specially (i.e. we want to scan it as a normal
       
  1019     // object with strong references).
       
  1020     return false;
       
  1021   }
       
  1022 
       
  1023   // We only enqueue references whose referents are not (yet) strongly
       
  1024   // reachable.
       
  1025   if (is_alive_non_header() != NULL) {
       
  1026     oop referent = java_lang_ref_Reference::referent(obj);
       
  1027     // We'd like to assert the following:
       
  1028     // assert(referent != NULL, "Refs with null referents already filtered");
       
  1029     // However, since this code may be executed concurrently with
       
  1030     // mutators, which can clear() the referent, it is not
       
  1031     // guaranteed that the referent is non-NULL.
       
  1032     if (is_alive_non_header()->do_object_b(referent)) {
       
  1033       return false;  // referent is reachable
       
  1034     }
       
  1035   }
       
  1036 
       
  1037   oop* discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
       
  1038   assert(discovered_addr != NULL && (*discovered_addr)->is_oop_or_null(),
       
  1039          "bad discovered field");
       
  1040   if (*discovered_addr != NULL) {
       
  1041     // The reference has already been discovered...
       
  1042     if (TraceReferenceGC) {
       
  1043       gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)",
       
  1044                              (oopDesc*)obj, obj->blueprint()->internal_name());
       
  1045     }
       
  1046     if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
       
  1047       // assumes that an object is not processed twice;
       
  1048       // if it's been already discovered it must be on another
       
  1049       // generation's discovered list; so we won't discover it.
       
  1050       return false;
       
  1051     } else {
       
  1052       assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
       
  1053              "Unrecognized policy");
       
  1054       // Check assumption that an object is not potentially
       
  1055       // discovered twice except by concurrent collectors that potentially
       
  1056       // trace the same Reference object twice.
       
  1057       assert(UseConcMarkSweepGC,
       
  1058              "Only possible with a concurrent collector");
       
  1059       return true;
       
  1060     }
       
  1061   }
       
  1062 
       
  1063   if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
       
  1064     oop referent = java_lang_ref_Reference::referent(obj);
       
  1065     assert(referent->is_oop(), "bad referent");
       
  1066     // enqueue if and only if either:
       
  1067     // reference is in our span or
       
  1068     // we are an atomic collector and referent is in our span
       
  1069     if (_span.contains(obj_addr) ||
       
  1070         (discovery_is_atomic() && _span.contains(referent))) {
       
  1071       // should_enqueue = true;
       
  1072     } else {
       
  1073       return false;
       
  1074     }
       
  1075   } else {
       
  1076     assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
       
  1077            _span.contains(obj_addr), "code inconsistency");
       
  1078   }
       
  1079 
       
  1080   // Get the right type of discovered queue head.
       
  1081   DiscoveredList* list = get_discovered_list(rt);
       
  1082   if (list == NULL) {
       
  1083     return false;   // nothing special needs to be done
       
  1084   }
       
  1085 
       
  1086   // We do a raw store here, the field will be visited later when
       
  1087   // processing the discovered references.
       
  1088   if (_discovery_is_mt) {
       
  1089     add_to_discovered_list_mt(*list, obj, discovered_addr);
       
  1090   } else {
       
  1091     *discovered_addr = list->head();
       
  1092     list->set_head(obj);
       
  1093     list->set_length(list->length() + 1);
       
  1094   }
       

  // In the MT discovery case, it is currently possible to see
  // the following message multiple times if several threads
  // discover a reference at about the same time. Only one thread,
  // however, will actually have added it to the discovered queue.
  // One could let add_to_discovered_list_mt() return an
  // indication of success in queueing (by one thread) or
  // failure (by all other threads), but I decided the extra
  // code was not worth the effort for something that is
  // only used for debugging support.
  if (TraceReferenceGC) {
    oop referent = java_lang_ref_Reference::referent(obj);
    if (PrintGCDetails) {
      gclog_or_tty->print_cr("Enqueued reference (" INTPTR_FORMAT ": %s)",
                             (oopDesc*)obj, obj->blueprint()->internal_name());
    }
    assert(referent->is_oop(), "Enqueued a bad referent");
  }
  assert(obj->is_oop(), "Enqueued a bad reference");
  return true;
}
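
#ifndef PRODUCT
// Illustrative sketch only: this helper is ours, not part of the
// ReferenceProcessor interface, and is not called from anywhere. It
// walks one discovered list through the discovered fields, the same
// way clear_discovered_references() below does, printing each entry
// until the sentinel terminator is reached.
static void print_discovered_list(DiscoveredList& refs_list) {
  for (oop obj = refs_list.head();
       obj != ReferenceProcessor::_sentinelRef;
       obj = java_lang_ref_Reference::discovered(obj)) {
    gclog_or_tty->print_cr("  discovered: " INTPTR_FORMAT ": %s",
                           (oopDesc*)obj, obj->blueprint()->internal_name());
  }
}
#endif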

// Preclean the discovered references by removing those
// whose referents are alive, and by marking from those that
// are not active. These lists can be handled here
// in any order and, indeed, concurrently.
void ReferenceProcessor::preclean_discovered_references(
  BoolObjectClosure* is_alive,
  OopClosure* keep_alive,
  VoidClosure* complete_gc,
  YieldClosure* yield) {

  NOT_PRODUCT(verify_ok_to_handle_reflists());

  // Soft references
  {
    TraceTime tt("Preclean SoftReferences", PrintGCDetails && PrintReferenceGC,
                 false, gclog_or_tty);
    for (int i = 0; i < _num_q; i++) {
      preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
                                  keep_alive, complete_gc, yield);
    }
  }
  if (yield->should_return()) {
    return;
  }

  // Weak references
  {
    TraceTime tt("Preclean WeakReferences", PrintGCDetails && PrintReferenceGC,
                 false, gclog_or_tty);
    for (int i = 0; i < _num_q; i++) {
      preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
                                  keep_alive, complete_gc, yield);
    }
  }
  if (yield->should_return()) {
    return;
  }

  // Final references
  {
    TraceTime tt("Preclean FinalReferences", PrintGCDetails && PrintReferenceGC,
                 false, gclog_or_tty);
    for (int i = 0; i < _num_q; i++) {
      preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
                                  keep_alive, complete_gc, yield);
    }
  }
  if (yield->should_return()) {
    return;
  }

  // Phantom references
  {
    TraceTime tt("Preclean PhantomReferences", PrintGCDetails && PrintReferenceGC,
                 false, gclog_or_tty);
    for (int i = 0; i < _num_q; i++) {
      preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
                                  keep_alive, complete_gc, yield);
    }
  }
}
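
#ifndef PRODUCT
// Illustrative sketch only: this closure is an example written for
// this comment, not an existing part of the collector interface. It
// shows one way a caller could bound the time spent in the precleaning
// pass above: the loops check should_return() between reference types
// and bail out once the deadline has passed.
class DeadlineYieldClosure: public YieldClosure {
 private:
  const jlong _deadline_ms;   // wall-clock time, in milliseconds, to stop at
 public:
  DeadlineYieldClosure(jlong deadline_ms) : _deadline_ms(deadline_ms) { }
  bool should_return() { return os::javaTimeMillis() >= _deadline_ms; }
};
#endif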

// Walk the given discovered ref list, and remove all reference objects
// whose referents are still alive, whose referents are NULL, or which
// are not active (have a non-NULL next field). NOTE: For this to work
// correctly, refs discovery cannot be happening concurrently with this
// step.
void ReferenceProcessor::preclean_discovered_reflist(
  DiscoveredList& refs_list, BoolObjectClosure* is_alive,
  OopClosure* keep_alive, VoidClosure* complete_gc, YieldClosure* yield) {

  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
  size_t length = refs_list.length();
  while (iter.has_next()) {
    iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
    oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
    if (iter.referent() == NULL || iter.is_referent_alive() ||
        *next_addr != NULL) {
      // The referent has been cleared, or is alive, or the Reference is not
      // active; we need to trace and mark its cohort.
      if (TraceReferenceGC) {
        gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
                               iter.obj(), iter.obj()->blueprint()->internal_name());
      }
      // Remove Reference object from list
      iter.remove();
      --length;
      // Keep alive its cohort.
      iter.make_referent_alive();
      keep_alive->do_oop(next_addr);
    } else {
      iter.next();
    }
  }
  refs_list.set_length(length);

  // Close the reachable set
  complete_gc->do_void();

  NOT_PRODUCT(
    if (PrintGCDetails && PrintReferenceGC) {
      gclog_or_tty->print(" Dropped " SIZE_FORMAT " Refs out of " SIZE_FORMAT
                          " Refs in discovered list ",
                          iter.removed(), iter.processed());
    }
  )
}

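// The discovered lists are laid out as subclasses_of_ref consecutive
// groups of _num_q lists each, in Soft/Weak/Final/Phantom order, so
// i / _num_q selects the reference type. For example, with _num_q == 4,
// index 5 falls in the second group and list_name(5) is "WeakRef".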
const char* ReferenceProcessor::list_name(int i) {
  assert(i >= 0 && i < _num_q * subclasses_of_ref, "Out of bounds index");
  int j = i / _num_q;
  switch (j) {
    case 0: return "SoftRef";
    case 1: return "WeakRef";
    case 2: return "FinalRef";
    case 3: return "PhantomRef";
  }
  ShouldNotReachHere();
  return NULL;
}

#ifndef PRODUCT
void ReferenceProcessor::verify_ok_to_handle_reflists() {
  // empty for now
}
#endif

void ReferenceProcessor::verify() {
  guarantee(_sentinelRef != NULL && _sentinelRef->is_oop(), "Lost _sentinelRef");
}

#ifndef PRODUCT
void ReferenceProcessor::clear_discovered_references() {
  guarantee(!_discovering_refs, "Discovering refs?");
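  // The Soft/Weak/Final/Phantom list arrays are contiguous, with
  // _discoveredSoftRefs first, so a single index running up to
  // _num_q * subclasses_of_ref visits every discovered list.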
  for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
    oop obj = _discoveredSoftRefs[i].head();
    while (obj != _sentinelRef) {
      oop next = java_lang_ref_Reference::discovered(obj);
      java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
      obj = next;
    }
    _discoveredSoftRefs[i].set_head(_sentinelRef);
    _discoveredSoftRefs[i].set_length(0);
  }
}
#endif // PRODUCT