src/hotspot/share/gc/z/zReferenceProcessor.cpp
changeset 53539 4ffdb0093c89
parent 53161 03efcf7fc34b
child 54623 1126f0607c70
comparison: 53538:cbefe308b543 to 53539:4ffdb0093c89
@@ -37,10 +37,75 @@
 #include "runtime/os.hpp"
 
 static const ZStatSubPhase ZSubPhaseConcurrentReferencesProcess("Concurrent References Process");
 static const ZStatSubPhase ZSubPhaseConcurrentReferencesEnqueue("Concurrent References Enqueue");
 
+static ReferenceType reference_type(oop reference) {
+  return InstanceKlass::cast(reference->klass())->reference_type();
+}
+
+static const char* reference_type_name(ReferenceType type) {
+  switch (type) {
+  case REF_SOFT:
+    return "Soft";
+
+  case REF_WEAK:
+    return "Weak";
+
+  case REF_FINAL:
+    return "Final";
+
+  case REF_PHANTOM:
+    return "Phantom";
+
+  default:
+    ShouldNotReachHere();
+    return NULL;
+  }
+}
+
+static volatile oop* reference_referent_addr(oop reference) {
+  return (volatile oop*)java_lang_ref_Reference::referent_addr_raw(reference);
+}
+
+static oop reference_referent(oop reference) {
+  return *reference_referent_addr(reference);
+}
+
+static void reference_set_referent(oop reference, oop referent) {
+  java_lang_ref_Reference::set_referent_raw(reference, referent);
+}
+
+static oop* reference_discovered_addr(oop reference) {
+  return (oop*)java_lang_ref_Reference::discovered_addr_raw(reference);
+}
+
+static oop reference_discovered(oop reference) {
+  return *reference_discovered_addr(reference);
+}
+
+static void reference_set_discovered(oop reference, oop discovered) {
+  java_lang_ref_Reference::set_discovered_raw(reference, discovered);
+}
+
+static oop* reference_next_addr(oop reference) {
+  return (oop*)java_lang_ref_Reference::next_addr_raw(reference);
+}
+
+static oop reference_next(oop reference) {
+  return *reference_next_addr(reference);
+}
+
+static void reference_set_next(oop reference, oop next) {
+  java_lang_ref_Reference::set_next_raw(reference, next);
+}
+
+static void soft_reference_update_clock() {
+  const jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
+  java_lang_ref_SoftReference::set_clock(now);
+}
+
 ZReferenceProcessor::ZReferenceProcessor(ZWorkers* workers) :
     _workers(workers),
     _soft_reference_policy(NULL),
     _encountered_count(),
     _discovered_count(),
@@ -52,223 +117,201 @@
 void ZReferenceProcessor::set_soft_reference_policy(bool clear) {
   static AlwaysClearPolicy always_clear_policy;
   static LRUMaxHeapPolicy lru_max_heap_policy;
 
   if (clear) {
-    log_info(gc, ref)("Clearing All Soft References");
+    log_info(gc, ref)("Clearing All SoftReferences");
     _soft_reference_policy = &always_clear_policy;
   } else {
     _soft_reference_policy = &lru_max_heap_policy;
   }
 
   _soft_reference_policy->setup();
 }
 
-void ZReferenceProcessor::update_soft_reference_clock() const {
-  const jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
-  java_lang_ref_SoftReference::set_clock(now);
-}
-
-bool ZReferenceProcessor::is_inactive_final_reference(oop obj, ReferenceType type) const {
-  // A non-null next field for a FinalReference means the reference is inactive.
-  return (type == REF_FINAL) && (java_lang_ref_Reference::next(obj) != NULL);
-}
-
-ReferenceType ZReferenceProcessor::reference_type(oop obj) const {
-  return InstanceKlass::cast(obj->klass())->reference_type();
-}
-
-const char* ZReferenceProcessor::reference_type_name(ReferenceType type) const {
-  switch (type) {
-  case REF_SOFT:
-    return "Soft";
-
-  case REF_WEAK:
-    return "Weak";
-
-  case REF_FINAL:
-    return "Final";
-
-  case REF_PHANTOM:
-    return "Phantom";
-
-  default:
-    ShouldNotReachHere();
-    return NULL;
-  }
-}
-
-volatile oop* ZReferenceProcessor::reference_referent_addr(oop obj) const {
-  return (volatile oop*)java_lang_ref_Reference::referent_addr_raw(obj);
-}
-
-oop ZReferenceProcessor::reference_referent(oop obj) const {
-  return *reference_referent_addr(obj);
-}
-
-bool ZReferenceProcessor::is_referent_strongly_alive_or_null(oop obj, ReferenceType type) const {
-  // Check if the referent is strongly alive or null, in which case we don't want to
-  // discover the reference. It can only be null if the application called
-  // Reference.enqueue() or Reference.clear().
-  //
+bool ZReferenceProcessor::is_inactive(oop reference, oop referent, ReferenceType type) const {
+  if (type == REF_FINAL) {
+    // A FinalReference is inactive if its next field is non-null. An application can't
+    // call enqueue() or clear() on a FinalReference.
+    return reference_next(reference) != NULL;
+  } else {
+    // A non-FinalReference is inactive if the referent is null. The referent can only
+    // be null if the application called Reference.enqueue() or Reference.clear().
+    return referent == NULL;
+  }
+}
+
+bool ZReferenceProcessor::is_strongly_live(oop referent) const {
+  return ZHeap::heap()->is_object_strongly_live(ZOop::to_address(referent));
+}
+
+bool ZReferenceProcessor::is_softly_live(oop reference, ReferenceType type) const {
+  if (type != REF_SOFT) {
+    // Not a SoftReference
+    return false;
+  }
+
+  // Ask SoftReference policy
+  const jlong clock = java_lang_ref_SoftReference::clock();
+  assert(clock != 0, "Clock not initialized");
+  assert(_soft_reference_policy != NULL, "Policy not initialized");
+  return !_soft_reference_policy->should_clear_reference(reference, clock);
+}
+
+bool ZReferenceProcessor::should_discover(oop reference, ReferenceType type) const {
+  volatile oop* const referent_addr = reference_referent_addr(reference);
+  const oop referent = ZBarrier::weak_load_barrier_on_oop_field(referent_addr);
+
+  if (is_inactive(reference, referent, type)) {
+    return false;
+  }
+
+  if (is_strongly_live(referent)) {
+    return false;
+  }
+
+  if (is_softly_live(reference, type)) {
+    return false;
+  }
+
   // PhantomReferences with finalizable marked referents should technically not have
   // to be discovered. However, InstanceRefKlass::oop_oop_iterate_ref_processing()
   // does not know about the finalizable mark concept, and will therefore mark
   // referents in non-discovered PhantomReferences as strongly live. To prevent
   // this, we always discover PhantomReferences with finalizable marked referents.
   // They will automatically be dropped during the reference processing phase.
-
-  volatile oop* const p = reference_referent_addr(obj);
-  const oop o = ZBarrier::weak_load_barrier_on_oop_field(p);
-  return o == NULL || ZHeap::heap()->is_object_strongly_live(ZOop::to_address(o));
-}
-
-bool ZReferenceProcessor::is_referent_softly_alive(oop obj, ReferenceType type) const {
-  if (type != REF_SOFT) {
-    // Not a soft reference
-    return false;
-  }
-
-  // Ask soft reference policy
-  const jlong clock = java_lang_ref_SoftReference::clock();
-  assert(clock != 0, "Clock not initialized");
-  assert(_soft_reference_policy != NULL, "Policy not initialized");
-  return !_soft_reference_policy->should_clear_reference(obj, clock);
-}
-
-bool ZReferenceProcessor::should_drop_reference(oop obj, ReferenceType type) const {
+  return true;
+}
+
+bool ZReferenceProcessor::should_drop(oop reference, ReferenceType type) const {
   // This check is racing with a call to Reference.clear() from the application.
   // If the application clears the reference after this check it will still end
   // up on the pending list, and there's nothing we can do about that without
   // changing the Reference.clear() API. This check is also racing with a call
   // to Reference.enqueue() from the application, which is unproblematic, since
   // the application wants the reference to be enqueued anyway.
-  const oop o = reference_referent(obj);
-  if (o == NULL) {
+  const oop referent = reference_referent(reference);
+  if (referent == NULL) {
     // Reference has been cleared, by a call to Reference.enqueue()
     // or Reference.clear() from the application, which means we
     // should drop the reference.
     return true;
   }
 
   // Check if the referent is still alive, in which case we should
   // drop the reference.
   if (type == REF_PHANTOM) {
-    return ZBarrier::is_alive_barrier_on_phantom_oop(o);
+    return ZBarrier::is_alive_barrier_on_phantom_oop(referent);
   } else {
-    return ZBarrier::is_alive_barrier_on_weak_oop(o);
+    return ZBarrier::is_alive_barrier_on_weak_oop(referent);
   }
 }
 
-bool ZReferenceProcessor::should_mark_referent(ReferenceType type) const {
-  // Referents of final references (and its reachable sub graph) are
-  // always marked finalizable during discovery. This avoids the problem
-  // of later having to mark those objects if the referent is still final
-  // reachable during processing.
-  return type == REF_FINAL;
-}
-
-void ZReferenceProcessor::keep_referent_alive(oop obj, ReferenceType type) const {
-  volatile oop* const p = reference_referent_addr(obj);
+void ZReferenceProcessor::keep_alive(oop reference, ReferenceType type) const {
+  volatile oop* const p = reference_referent_addr(reference);
   if (type == REF_PHANTOM) {
     ZBarrier::keep_alive_barrier_on_phantom_oop_field(p);
   } else {
     ZBarrier::keep_alive_barrier_on_weak_oop_field(p);
   }
 }
 
-bool ZReferenceProcessor::discover_reference(oop obj, ReferenceType type) {
+void ZReferenceProcessor::make_inactive(oop reference, ReferenceType type) const {
+  if (type == REF_FINAL) {
+    // Don't clear referent. It is needed by the Finalizer thread to make the call
+    // to finalize(). A FinalReference is instead made inactive by self-looping the
+    // next field. An application can't call FinalReference.enqueue(), so there is
+    // no race to worry about when setting the next field.
+    assert(reference_next(reference) == NULL, "Already inactive");
+    reference_set_next(reference, reference);
+  } else {
+    // Clear referent
+    reference_set_referent(reference, NULL);
+  }
+}
+
+void ZReferenceProcessor::discover(oop reference, ReferenceType type) {
+  log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
+
+  // Update statistics
+  _discovered_count.get()[type]++;
+
+  if (type == REF_FINAL) {
+    // Mark referent (and its reachable subgraph) finalizable. This avoids
+    // the problem of later having to mark those objects if the referent is
+    // still final reachable during processing.
+    volatile oop* const referent_addr = reference_referent_addr(reference);
+    ZBarrier::mark_barrier_on_oop_field(referent_addr, true /* finalizable */);
+  }
+
+  // Add reference to discovered list
+  assert(reference_discovered(reference) == NULL, "Already discovered");
+  oop* const list = _discovered_list.addr();
+  reference_set_discovered(reference, *list);
+  *list = reference;
+}
+
+bool ZReferenceProcessor::discover_reference(oop reference, ReferenceType type) {
   if (!RegisterReferences) {
     // Reference processing disabled
     return false;
   }
 
-  log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s)", p2i(obj), reference_type_name(type));
+  log_trace(gc, ref)("Encountered Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
 
   // Update statistics
   _encountered_count.get()[type]++;
 
-  if (is_referent_strongly_alive_or_null(obj, type) ||
-      is_inactive_final_reference(obj, type) ||
-      is_referent_softly_alive(obj, type)) {
+  if (!should_discover(reference, type)) {
     // Not discovered
     return false;
   }
 
-  discover(obj, type);
+  discover(reference, type);
 
   // Discovered
   return true;
 }
 
-void ZReferenceProcessor::discover(oop obj, ReferenceType type) {
-  log_trace(gc, ref)("Discovered Reference: " PTR_FORMAT " (%s)", p2i(obj), reference_type_name(type));
-
-  // Update statistics
-  _discovered_count.get()[type]++;
-
-  // Mark referent finalizable
-  if (should_mark_referent(type)) {
-    oop* const referent_addr = (oop*)java_lang_ref_Reference::referent_addr_raw(obj);
-    ZBarrier::mark_barrier_on_oop_field(referent_addr, true /* finalizable */);
-  }
-
-  // Add reference to discovered list
-  assert(java_lang_ref_Reference::discovered(obj) == NULL, "Already discovered");
-  oop* const list = _discovered_list.addr();
-  java_lang_ref_Reference::set_discovered(obj, *list);
-  *list = obj;
-}
-
-oop ZReferenceProcessor::drop(oop obj, ReferenceType type) {
-  log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(obj), reference_type_name(type));
+oop ZReferenceProcessor::drop(oop reference, ReferenceType type) {
+  log_trace(gc, ref)("Dropped Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
 
   // Keep referent alive
-  keep_referent_alive(obj, type);
+  keep_alive(reference, type);
 
   // Unlink and return next in list
-  const oop next = java_lang_ref_Reference::discovered(obj);
-  java_lang_ref_Reference::set_discovered(obj, NULL);
+  const oop next = reference_discovered(reference);
+  reference_set_discovered(reference, NULL);
   return next;
 }
 
-oop* ZReferenceProcessor::keep(oop obj, ReferenceType type) {
-  log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(obj), reference_type_name(type));
+oop* ZReferenceProcessor::keep(oop reference, ReferenceType type) {
+  log_trace(gc, ref)("Enqueued Reference: " PTR_FORMAT " (%s)", p2i(reference), reference_type_name(type));
 
   // Update statistics
   _enqueued_count.get()[type]++;
 
-  if (type != REF_FINAL) {
-    // Clear referent
-    java_lang_ref_Reference::set_referent(obj, NULL);
-  } else {
-    // For a FinalReference, don't clear the referent, because it is
-    // needed for the finalize call.  Instead, make the reference
-    // inactive by self-looping the 'next' field.  FinalReference
-    // doesn't allow Reference.enqueue, so there's no race to worry
-    // about when setting 'next'.
-    assert(java_lang_ref_Reference::next(obj) == NULL, "enqueued FinalReference");
-    java_lang_ref_Reference::set_next_raw(obj, obj);
-  }
+  // Make reference inactive
+  make_inactive(reference, type);
 
   // Return next in list
-  return (oop*)java_lang_ref_Reference::discovered_addr_raw(obj);
+  return reference_discovered_addr(reference);
 }
 
 void ZReferenceProcessor::work() {
   // Process discovered references
   oop* const list = _discovered_list.addr();
   oop* p = list;
 
   while (*p != NULL) {
-    const oop obj = *p;
-    const ReferenceType type = reference_type(obj);
+    const oop reference = *p;
+    const ReferenceType type = reference_type(reference);
 
-    if (should_drop_reference(obj, type)) {
-      *p = drop(obj, type);
+    if (should_drop(reference, type)) {
+      *p = drop(reference, type);
     } else {
-      p = keep(obj, type);
+      p = keep(reference, type);
     }
   }
 
   // Prepend discovered references to internal pending list
   if (*list != NULL) {
@@ -388,12 +431,12 @@
 
   // Process discovered lists
   ZReferenceProcessorTask task(this);
   _workers->run_concurrent(&task);
 
-  // Update soft reference clock
-  update_soft_reference_clock();
+  // Update SoftReference clock
+  soft_reference_update_clock();
 
   // Collect, log and trace statistics
   collect_statistics();
 }
 