26 # include "incls/_referenceProcessor.cpp.incl" |
26 # include "incls/_referenceProcessor.cpp.incl" |
27 |
27 |
28 // List of discovered references. |
28 // List of discovered references. |
29 class DiscoveredList { |
29 class DiscoveredList { |
30 public: |
30 public: |
31 DiscoveredList() : _head(NULL), _len(0) { } |
31 DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { } |
32 oop head() const { return _head; } |
32 oop head() const { |
33 oop* head_ptr() { return &_head; } |
33 return UseCompressedOops ? oopDesc::decode_heap_oop_not_null(_compressed_head) : |
34 void set_head(oop o) { _head = o; } |
34 _oop_head; |
35 bool empty() const { return _head == ReferenceProcessor::_sentinelRef; } |
35 } |
|
36 HeapWord* adr_head() { |
|
37 return UseCompressedOops ? (HeapWord*)&_compressed_head : |
|
38 (HeapWord*)&_oop_head; |
|
39 } |
|
40 void set_head(oop o) { |
|
41 if (UseCompressedOops) { |
|
42 // Must compress the head ptr. |
|
43 _compressed_head = oopDesc::encode_heap_oop_not_null(o); |
|
44 } else { |
|
45 _oop_head = o; |
|
46 } |
|
47 } |
|
48 bool empty() const { return head() == ReferenceProcessor::sentinel_ref(); } |
36 size_t length() { return _len; } |
49 size_t length() { return _len; } |
37 void set_length(size_t len) { _len = len; } |
50 void set_length(size_t len) { _len = len; } |
38 private: |
51 private: |
|
52 // Set value depending on UseCompressedOops. This could be a template class |
|
53 // but then we have to fix all the instantiations and declarations that use this class. |
|
54 oop _oop_head; |
|
55 narrowOop _compressed_head; |
39 size_t _len; |
56 size_t _len; |
40 oop _head; |
|
41 }; |
57 }; |
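
The reworked DiscoveredList above carries both an _oop_head and a _compressed_head and branches on UseCompressedOops in every accessor; its own comment notes it "could be a template class" but that every declaration using it would then have to name the element type. A minimal sketch of that templated alternative, with made-up names (RefList, Oop, NarrowOop) standing in for the HotSpot types, purely for illustration:

#include <cstddef>
#include <cstdint>

typedef void*    Oop;        // stand-in for HotSpot's oop
typedef uint32_t NarrowOop;  // stand-in for HotSpot's narrowOop

template <typename T>
class RefList {
 public:
  RefList() : _head(T()), _len(0) { }
  T       head() const          { return _head; }
  T*      adr_head()            { return &_head; }
  void    set_head(T h)         { _head = h; }
  size_t  length() const        { return _len; }
  void    set_length(size_t n)  { _len = n; }
 private:
  T      _head;   // full or compressed slot, fixed at compile time
  size_t _len;
};

// The cost the patch avoids: every use site must now spell out the element type.
int main() {
  RefList<Oop>       wide;
  RefList<NarrowOop> narrow;
  wide.set_length(1);
  narrow.set_length(2);
  return (int)(wide.length() + narrow.length());
}

Keeping a single non-template class with both fields, as the patch does, trades a word of storage and a runtime branch for leaving every existing DiscoveredList declaration in the collector untouched.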
42 |
58 |
43 oop ReferenceProcessor::_sentinelRef = NULL; |
59 oop ReferenceProcessor::_sentinelRef = NULL; |
44 |
60 |
45 const int subclasses_of_ref = REF_PHANTOM - REF_OTHER; |
61 const int subclasses_of_ref = REF_PHANTOM - REF_OTHER; |
47 void referenceProcessor_init() { |
63 void referenceProcessor_init() { |
48 ReferenceProcessor::init_statics(); |
64 ReferenceProcessor::init_statics(); |
49 } |
65 } |
50 |
66 |
51 void ReferenceProcessor::init_statics() { |
67 void ReferenceProcessor::init_statics() { |
52 assert(_sentinelRef == NULL, "should be initialized precsiely once"); |
68 assert(_sentinelRef == NULL, "should be initialized precisely once"); |
53 EXCEPTION_MARK; |
69 EXCEPTION_MARK; |
54 _sentinelRef = instanceKlass::cast( |
70 _sentinelRef = instanceKlass::cast( |
55 SystemDictionary::object_klass())-> |
71 SystemDictionary::reference_klass())-> |
56 allocate_permanent_instance(THREAD); |
72 allocate_permanent_instance(THREAD); |
57 |
73 |
58 // Initialize the master soft ref clock. |
74 // Initialize the master soft ref clock. |
59 java_lang_ref_SoftReference::set_clock(os::javaTimeMillis()); |
75 java_lang_ref_SoftReference::set_clock(os::javaTimeMillis()); |
60 |
76 |
61 if (HAS_PENDING_EXCEPTION) { |
77 if (HAS_PENDING_EXCEPTION) { |
67 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery || |
83 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery || |
68 RefDiscoveryPolicy == ReferentBasedDiscovery, |
84 RefDiscoveryPolicy == ReferentBasedDiscovery, |
69 "Unrecongnized RefDiscoveryPolicy"); |
85 "Unrecongnized RefDiscoveryPolicy"); |
70 } |
86 } |
71 |
87 |
72 |
88 ReferenceProcessor* |
73 ReferenceProcessor* ReferenceProcessor::create_ref_processor( |
89 ReferenceProcessor::create_ref_processor(MemRegion span, |
74 MemRegion span, |
90 bool atomic_discovery, |
75 bool atomic_discovery, |
91 bool mt_discovery, |
76 bool mt_discovery, |
92 BoolObjectClosure* is_alive_non_header, |
77 BoolObjectClosure* is_alive_non_header, |
93 int parallel_gc_threads, |
78 int parallel_gc_threads, |
94 bool mt_processing) { |
79 bool mt_processing) |
|
80 { |
|
81 int mt_degree = 1; |
95 int mt_degree = 1; |
82 if (parallel_gc_threads > 1) { |
96 if (parallel_gc_threads > 1) { |
83 mt_degree = parallel_gc_threads; |
97 mt_degree = parallel_gc_threads; |
84 } |
98 } |
85 ReferenceProcessor* rp = |
99 ReferenceProcessor* rp = |
91 } |
105 } |
92 rp->set_is_alive_non_header(is_alive_non_header); |
106 rp->set_is_alive_non_header(is_alive_non_header); |
93 return rp; |
107 return rp; |
94 } |
108 } |
95 |
109 |
96 |
|
97 ReferenceProcessor::ReferenceProcessor(MemRegion span, |
110 ReferenceProcessor::ReferenceProcessor(MemRegion span, |
98 bool atomic_discovery, bool mt_discovery, int mt_degree, |
111 bool atomic_discovery, |
99 bool mt_processing) : |
112 bool mt_discovery, |
|
113 int mt_degree, |
|
114 bool mt_processing) : |
100 _discovering_refs(false), |
115 _discovering_refs(false), |
101 _enqueuing_is_done(false), |
116 _enqueuing_is_done(false), |
102 _is_alive_non_header(NULL), |
117 _is_alive_non_header(NULL), |
103 _processing_is_mt(mt_processing), |
118 _processing_is_mt(mt_processing), |
104 _next_id(0) |
119 _next_id(0) |
112 vm_exit_during_initialization("Could not allocate RefProc Array"); |
127 vm_exit_during_initialization("Could not allocate RefProc Array"); |
113 } |
128 } |
114 _discoveredWeakRefs = &_discoveredSoftRefs[_num_q]; |
129 _discoveredWeakRefs = &_discoveredSoftRefs[_num_q]; |
115 _discoveredFinalRefs = &_discoveredWeakRefs[_num_q]; |
130 _discoveredFinalRefs = &_discoveredWeakRefs[_num_q]; |
116 _discoveredPhantomRefs = &_discoveredFinalRefs[_num_q]; |
131 _discoveredPhantomRefs = &_discoveredFinalRefs[_num_q]; |
117 assert(_sentinelRef != NULL, "_sentinelRef is NULL"); |
132 assert(sentinel_ref() != NULL, "_sentinelRef is NULL"); |
118 // Initialize all entries to _sentinelRef |
133 // Initialize all entries to _sentinelRef |
119 for (int i = 0; i < _num_q * subclasses_of_ref; i++) { |
134 for (int i = 0; i < _num_q * subclasses_of_ref; i++) { |
120 _discoveredSoftRefs[i].set_head(_sentinelRef); |
135 _discoveredSoftRefs[i].set_head(sentinel_ref()); |
121 _discoveredSoftRefs[i].set_length(0); |
136 _discoveredSoftRefs[i].set_length(0); |
122 } |
137 } |
123 } |
138 } |
124 |
139 |
125 #ifndef PRODUCT |
140 #ifndef PRODUCT |
132 } |
147 } |
133 #endif |
148 #endif |
134 |
149 |
135 void ReferenceProcessor::weak_oops_do(OopClosure* f) { |
150 void ReferenceProcessor::weak_oops_do(OopClosure* f) { |
136 for (int i = 0; i < _num_q * subclasses_of_ref; i++) { |
151 for (int i = 0; i < _num_q * subclasses_of_ref; i++) { |
137 f->do_oop(_discoveredSoftRefs[i].head_ptr()); |
152 if (UseCompressedOops) { |
|
153 f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head()); |
|
154 } else { |
|
155 f->do_oop((oop*)_discoveredSoftRefs[i].adr_head()); |
|
156 } |
138 } |
157 } |
139 } |
158 } |
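
weak_oops_do above shows the other pattern this change threads through the file: closures expose both do_oop(oop*) and do_oop(narrowOop*), and the caller casts the raw HeapWord* slot to whichever width UseCompressedOops implies. A small self-contained sketch of that dual-overload closure shape, with illustrative names (OopClosureSketch, CountingClosure) rather than the real HotSpot classes:

#include <cstdint>
#include <cstdio>

typedef void*    Oop;
typedef uint32_t NarrowOop;

struct OopClosureSketch {
  virtual ~OopClosureSketch() { }
  virtual void do_oop(Oop* p) = 0;        // slot holding a full-width pointer
  virtual void do_oop(NarrowOop* p) = 0;  // slot holding a compressed pointer
};

struct CountingClosure : public OopClosureSketch {
  int count;
  CountingClosure() : count(0) { }
  virtual void do_oop(Oop*)       { count++; }
  virtual void do_oop(NarrowOop*) { count++; }  // either width is just "a root slot"
};

int main() {
  Oop       wide   = NULL;
  NarrowOop narrow = 0;
  CountingClosure cl;
  cl.do_oop(&wide);    // the caller, not the closure, knows the slot width
  cl.do_oop(&narrow);
  printf("%d\n", cl.count);  // 2
  return 0;
}

CountHandleClosure further down takes the opposite stance and calls ShouldNotReachHere() in its narrowOop overload, presumably because JNI handle slots always hold full-width pointers.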
140 |
159 |
141 void ReferenceProcessor::oops_do(OopClosure* f) { |
160 void ReferenceProcessor::oops_do(OopClosure* f) { |
142 f->do_oop(&_sentinelRef); |
161 f->do_oop(adr_sentinel_ref()); |
143 } |
162 } |
144 |
163 |
145 void ReferenceProcessor::update_soft_ref_master_clock() |
164 void ReferenceProcessor::update_soft_ref_master_clock() { |
146 { |
|
147 // Update (advance) the soft ref master clock field. This must be done |
165 // Update (advance) the soft ref master clock field. This must be done |
148 // after processing the soft ref list. |
166 // after processing the soft ref list. |
149 jlong now = os::javaTimeMillis(); |
167 jlong now = os::javaTimeMillis(); |
150 jlong clock = java_lang_ref_SoftReference::clock(); |
168 jlong clock = java_lang_ref_SoftReference::clock(); |
151 NOT_PRODUCT( |
169 NOT_PRODUCT( |
162 } |
180 } |
163 // Else leave clock stalled at its old value until time progresses |
181 // Else leave clock stalled at its old value until time progresses |
164 // past clock value. |
182 // past clock value. |
165 } |
183 } |
166 |
184 |
167 |
185 void ReferenceProcessor::process_discovered_references( |
168 void |
|
169 ReferenceProcessor::process_discovered_references( |
|
170 ReferencePolicy* policy, |
186 ReferencePolicy* policy, |
171 BoolObjectClosure* is_alive, |
187 BoolObjectClosure* is_alive, |
172 OopClosure* keep_alive, |
188 OopClosure* keep_alive, |
173 VoidClosure* complete_gc, |
189 VoidClosure* complete_gc, |
174 AbstractRefProcTaskExecutor* task_executor) { |
190 AbstractRefProcTaskExecutor* task_executor) { |
221 } |
237 } |
222 process_phaseJNI(is_alive, keep_alive, complete_gc); |
238 process_phaseJNI(is_alive, keep_alive, complete_gc); |
223 } |
239 } |
224 } |
240 } |
225 |
241 |
226 |
|
227 #ifndef PRODUCT |
242 #ifndef PRODUCT |
228 // Calculate the number of jni handles. |
243 // Calculate the number of jni handles. |
229 unsigned int ReferenceProcessor::count_jni_refs() |
244 uint ReferenceProcessor::count_jni_refs() { |
230 { |
|
231 class AlwaysAliveClosure: public BoolObjectClosure { |
245 class AlwaysAliveClosure: public BoolObjectClosure { |
232 public: |
246 public: |
233 bool do_object_b(oop obj) { return true; } |
247 virtual bool do_object_b(oop obj) { return true; } |
234 void do_object(oop obj) { assert(false, "Don't call"); } |
248 virtual void do_object(oop obj) { assert(false, "Don't call"); } |
235 }; |
249 }; |
236 |
250 |
237 class CountHandleClosure: public OopClosure { |
251 class CountHandleClosure: public OopClosure { |
238 private: |
252 private: |
239 int _count; |
253 int _count; |
240 public: |
254 public: |
241 CountHandleClosure(): _count(0) {} |
255 CountHandleClosure(): _count(0) {} |
242 void do_oop(oop* unused) { |
256 void do_oop(oop* unused) { _count++; } |
243 _count++; |
257 void do_oop(narrowOop* unused) { ShouldNotReachHere(); } |
244 } |
|
245 int count() { return _count; } |
258 int count() { return _count; } |
246 }; |
259 }; |
247 CountHandleClosure global_handle_count; |
260 CountHandleClosure global_handle_count; |
248 AlwaysAliveClosure always_alive; |
261 AlwaysAliveClosure always_alive; |
249 JNIHandles::weak_oops_do(&always_alive, &global_handle_count); |
262 JNIHandles::weak_oops_do(&always_alive, &global_handle_count); |
260 gclog_or_tty->print(", %u refs", count); |
273 gclog_or_tty->print(", %u refs", count); |
261 } |
274 } |
262 #endif |
275 #endif |
263 JNIHandles::weak_oops_do(is_alive, keep_alive); |
276 JNIHandles::weak_oops_do(is_alive, keep_alive); |
264 // Finally remember to keep sentinel around |
277 // Finally remember to keep sentinel around |
265 keep_alive->do_oop(&_sentinelRef); |
278 keep_alive->do_oop(adr_sentinel_ref()); |
266 complete_gc->do_void(); |
279 complete_gc->do_void(); |
267 } |
280 } |
268 |
281 |
269 bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) { |
282 |
270 NOT_PRODUCT(verify_ok_to_handle_reflists()); |
283 template <class T> |
|
284 static bool enqueue_discovered_ref_helper(ReferenceProcessor* ref, |
|
285 AbstractRefProcTaskExecutor* task_executor) { |
|
286 |
271 // Remember old value of pending references list |
287 // Remember old value of pending references list |
272 oop* pending_list_addr = java_lang_ref_Reference::pending_list_addr(); |
288 T* pending_list_addr = (T*)java_lang_ref_Reference::pending_list_addr(); |
273 oop old_pending_list_value = *pending_list_addr; |
289 T old_pending_list_value = *pending_list_addr; |
274 |
290 |
275 // Enqueue references that are not made active again, and |
291 // Enqueue references that are not made active again, and |
276 // clear the decks for the next collection (cycle). |
292 // clear the decks for the next collection (cycle). |
277 enqueue_discovered_reflists(pending_list_addr, task_executor); |
293 ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor); |
278 // Do the oop-check on pending_list_addr missed in |
294 // Do the oop-check on pending_list_addr missed in |
279 // enqueue_discovered_reflist. We should probably |
295 // enqueue_discovered_reflist. We should probably |
280 // do a raw oop_check so that future such idempotent |
296 // do a raw oop_check so that future such idempotent |
281 // oop_stores relying on the oop-check side-effect |
297 // oop_stores relying on the oop-check side-effect |
282 // may be elided automatically and safely without |
298 // may be elided automatically and safely without |
283 // affecting correctness. |
299 // affecting correctness. |
284 oop_store(pending_list_addr, *(pending_list_addr)); |
300 oop_store(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr)); |
285 |
301 |
286 // Stop treating discovered references specially. |
302 // Stop treating discovered references specially. |
287 disable_discovery(); |
303 ref->disable_discovery(); |
288 |
304 |
289 // Return true if new pending references were added |
305 // Return true if new pending references were added |
290 return old_pending_list_value != *pending_list_addr; |
306 return old_pending_list_value != *pending_list_addr; |
291 } |
307 } |
292 |
308 |
|
309 bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) { |
|
310 NOT_PRODUCT(verify_ok_to_handle_reflists()); |
|
311 if (UseCompressedOops) { |
|
312 return enqueue_discovered_ref_helper<narrowOop>(this, task_executor); |
|
313 } else { |
|
314 return enqueue_discovered_ref_helper<oop>(this, task_executor); |
|
315 } |
|
316 } |
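
enqueue_discovered_ref_helper above is the function-template variant of the same idea: the UseCompressedOops test happens once at the public entry point, and everything below it is instantiated for either narrowOop or oop. A hedged sketch of that dispatch shape, with invented names (process_pending, enqueue_pending) and a plain bool standing in for the real flag:

#include <cstdint>
#include <cstdio>

typedef void*    Oop;
typedef uint32_t NarrowOop;

static bool use_compressed = true;  // stands in for the UseCompressedOops flag

template <class T>
static bool process_pending(T* pending_list_addr) {
  T old_value = *pending_list_addr;
  // ... splice the discovered lists onto the pending list here ...
  return old_value != *pending_list_addr;  // did the pending list change?
}

static bool enqueue_pending(void* pending_list_addr) {
  // One runtime branch; each instantiation then works with a single slot width.
  if (use_compressed) {
    return process_pending((NarrowOop*)pending_list_addr);
  } else {
    return process_pending((Oop*)pending_list_addr);
  }
}

int main() {
  NarrowOop slot = 0;
  printf("%d\n", enqueue_pending(&slot) ? 1 : 0);  // 0: nothing spliced in this sketch
  return 0;
}

This keeps one copy of the splicing logic in source form while still letting the compiler see the concrete pointer width at each call site.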
|
317 |
293 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list, |
318 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list, |
294 oop* pending_list_addr) { |
319 HeapWord* pending_list_addr) { |
295 // Given a list of refs linked through the "discovered" field |
320 // Given a list of refs linked through the "discovered" field |
296 // (java.lang.ref.Reference.discovered) chain them through the |
321 // (java.lang.ref.Reference.discovered) chain them through the |
297 // "next" field (java.lang.ref.Reference.next) and prepend |
322 // "next" field (java.lang.ref.Reference.next) and prepend |
298 // to the pending list. |
323 // to the pending list. |
299 if (TraceReferenceGC && PrintGCDetails) { |
324 if (TraceReferenceGC && PrintGCDetails) { |
303 oop obj = refs_list.head(); |
328 oop obj = refs_list.head(); |
304 // Walk down the list, copying the discovered field into |
329 // Walk down the list, copying the discovered field into |
305 // the next field and clearing it (except for the last |
330 // the next field and clearing it (except for the last |
306 // non-sentinel object which is treated specially to avoid |
331 // non-sentinel object which is treated specially to avoid |
307 // confusion with an active reference). |
332 // confusion with an active reference). |
308 while (obj != _sentinelRef) { |
333 while (obj != sentinel_ref()) { |
309 assert(obj->is_instanceRef(), "should be reference object"); |
334 assert(obj->is_instanceRef(), "should be reference object"); |
310 oop next = java_lang_ref_Reference::discovered(obj); |
335 oop next = java_lang_ref_Reference::discovered(obj); |
311 if (TraceReferenceGC && PrintGCDetails) { |
336 if (TraceReferenceGC && PrintGCDetails) { |
312 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next " INTPTR_FORMAT, |
337 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next " INTPTR_FORMAT, |
313 (oopDesc*) obj, (oopDesc*) next); |
338 obj, next); |
314 } |
339 } |
315 assert(*java_lang_ref_Reference::next_addr(obj) == NULL, |
340 assert(java_lang_ref_Reference::next(obj) == NULL, |
316 "The reference should not be enqueued"); |
341 "The reference should not be enqueued"); |
317 if (next == _sentinelRef) { // obj is last |
342 if (next == sentinel_ref()) { // obj is last |
318 // Swap refs_list into pending_list_addr and |
343 // Swap refs_list into pending_list_addr and |
319 // set obj's next to what we read from pending_list_addr. |
344 // set obj's next to what we read from pending_list_addr. |
320 oop old = (oop)Atomic::xchg_ptr(refs_list.head(), pending_list_addr); |
345 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr); |
321 // Need oop_check on pending_list_addr above; |
346 // Need oop_check on pending_list_addr above; |
322 // see special oop-check code at the end of |
347 // see special oop-check code at the end of |
323 // enqueue_discovered_reflists() further below. |
348 // enqueue_discovered_reflists() further below. |
324 if (old == NULL) { |
349 if (old == NULL) { |
325 // obj should be made to point to itself, since |
350 // obj should be made to point to itself, since |
339 // Parallel enqueue task |
364 // Parallel enqueue task |
340 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask { |
365 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask { |
341 public: |
366 public: |
342 RefProcEnqueueTask(ReferenceProcessor& ref_processor, |
367 RefProcEnqueueTask(ReferenceProcessor& ref_processor, |
343 DiscoveredList discovered_refs[], |
368 DiscoveredList discovered_refs[], |
344 oop* pending_list_addr, |
369 HeapWord* pending_list_addr, |
345 oop sentinel_ref, |
370 oop sentinel_ref, |
346 int n_queues) |
371 int n_queues) |
347 : EnqueueTask(ref_processor, discovered_refs, |
372 : EnqueueTask(ref_processor, discovered_refs, |
348 pending_list_addr, sentinel_ref, n_queues) |
373 pending_list_addr, sentinel_ref, n_queues) |
349 { } |
374 { } |
350 |
375 |
351 virtual void work(unsigned int work_id) |
376 virtual void work(unsigned int work_id) { |
352 { |
|
353 assert(work_id < (unsigned int)_ref_processor.num_q(), "Index out-of-bounds"); |
377 assert(work_id < (unsigned int)_ref_processor.num_q(), "Index out-of-bounds"); |
354 // Simplest first cut: static partitioning. |
378 // Simplest first cut: static partitioning. |
355 int index = work_id; |
379 int index = work_id; |
356 for (int j = 0; j < subclasses_of_ref; j++, index += _n_queues) { |
380 for (int j = 0; j < subclasses_of_ref; j++, index += _n_queues) { |
357 _ref_processor.enqueue_discovered_reflist( |
381 _ref_processor.enqueue_discovered_reflist( |
361 } |
385 } |
362 } |
386 } |
363 }; |
387 }; |
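
RefProcEnqueueTask::work() above partitions the discovered lists statically: the lists form a subclasses_of_ref x _num_q array, and worker work_id strides through it by _n_queues, so each worker owns one queue index across all four reference kinds (soft, weak, final, phantom). A tiny standalone illustration of that indexing; the counts below are assumptions for the example, not values taken from a running VM:

#include <cstdio>

int main() {
  const int num_queues = 4;  // e.g. one list per parallel GC thread (assumed)
  const int subclasses = 4;  // soft, weak, final, phantom
  for (int work_id = 0; work_id < num_queues; work_id++) {
    printf("worker %d handles lists:", work_id);
    int index = work_id;
    for (int j = 0; j < subclasses; j++, index += num_queues) {
      printf(" %d", index);  // the same queue slot in each subclass block
    }
    printf("\n");
  }
  return 0;
}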
364 |
388 |
365 // Enqueue references that are not made active again |
389 // Enqueue references that are not made active again |
366 void ReferenceProcessor::enqueue_discovered_reflists(oop* pending_list_addr, |
390 void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr, |
367 AbstractRefProcTaskExecutor* task_executor) { |
391 AbstractRefProcTaskExecutor* task_executor) { |
368 if (_processing_is_mt && task_executor != NULL) { |
392 if (_processing_is_mt && task_executor != NULL) { |
369 // Parallel code |
393 // Parallel code |
370 RefProcEnqueueTask tsk(*this, _discoveredSoftRefs, |
394 RefProcEnqueueTask tsk(*this, _discoveredSoftRefs, |
371 pending_list_addr, _sentinelRef, _num_q); |
395 pending_list_addr, sentinel_ref(), _num_q); |
372 task_executor->execute(tsk); |
396 task_executor->execute(tsk); |
373 } else { |
397 } else { |
374 // Serial code: call the parent class's implementation |
398 // Serial code: call the parent class's implementation |
375 for (int i = 0; i < _num_q * subclasses_of_ref; i++) { |
399 for (int i = 0; i < _num_q * subclasses_of_ref; i++) { |
376 enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr); |
400 enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr); |
377 _discoveredSoftRefs[i].set_head(_sentinelRef); |
401 _discoveredSoftRefs[i].set_head(sentinel_ref()); |
378 _discoveredSoftRefs[i].set_length(0); |
402 _discoveredSoftRefs[i].set_length(0); |
379 } |
403 } |
380 } |
404 } |
381 } |
405 } |
382 |
406 |
386 inline DiscoveredListIterator(DiscoveredList& refs_list, |
410 inline DiscoveredListIterator(DiscoveredList& refs_list, |
387 OopClosure* keep_alive, |
411 OopClosure* keep_alive, |
388 BoolObjectClosure* is_alive); |
412 BoolObjectClosure* is_alive); |
389 |
413 |
390 // End Of List. |
414 // End Of List. |
391 inline bool has_next() const |
415 inline bool has_next() const { return _next != ReferenceProcessor::sentinel_ref(); } |
392 { return _next != ReferenceProcessor::_sentinelRef; } |
|
393 |
416 |
394 // Get oop to the Reference object. |
417 // Get oop to the Reference object. |
395 inline oop obj() const { return _ref; } |
418 inline oop obj() const { return _ref; } |
396 |
419 |
397 // Get oop to the referent object. |
420 // Get oop to the referent object. |
398 inline oop referent() const { return _referent; } |
421 inline oop referent() const { return _referent; } |
399 |
422 |
400 // Returns true if referent is alive. |
423 // Returns true if referent is alive. |
401 inline bool is_referent_alive() const; |
424 inline bool is_referent_alive() const; |
402 |
425 |
403 // Loads data for the current reference. |
426 // Loads data for the current reference. |
415 |
438 |
416 // Make the Reference object active again. |
439 // Make the Reference object active again. |
417 inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); } |
440 inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); } |
418 |
441 |
419 // Make the referent alive. |
442 // Make the referent alive. |
420 inline void make_referent_alive() { _keep_alive->do_oop(_referent_addr); } |
443 inline void make_referent_alive() { |
|
444 if (UseCompressedOops) { |
|
445 _keep_alive->do_oop((narrowOop*)_referent_addr); |
|
446 } else { |
|
447 _keep_alive->do_oop((oop*)_referent_addr); |
|
448 } |
|
449 } |
421 |
450 |
422 // Update the discovered field. |
451 // Update the discovered field. |
423 inline void update_discovered() { _keep_alive->do_oop(_prev_next); } |
452 inline void update_discovered() { |
|
453 // First _prev_next ref actually points into DiscoveredList (gross). |
|
454 if (UseCompressedOops) { |
|
455 _keep_alive->do_oop((narrowOop*)_prev_next); |
|
456 } else { |
|
457 _keep_alive->do_oop((oop*)_prev_next); |
|
458 } |
|
459 } |
424 |
460 |
425 // NULL out referent pointer. |
461 // NULL out referent pointer. |
426 inline void clear_referent() { *_referent_addr = NULL; } |
462 inline void clear_referent() { oop_store_raw(_referent_addr, NULL); } |
427 |
463 |
428 // Statistics |
464 // Statistics |
429 NOT_PRODUCT( |
465 NOT_PRODUCT( |
430 inline size_t processed() const { return _processed; } |
466 inline size_t processed() const { return _processed; } |
431 inline size_t removed() const { return _removed; } |
467 inline size_t removed() const { return _removed; } |
455 |
491 |
456 inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList& refs_list, |
492 inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList& refs_list, |
457 OopClosure* keep_alive, |
493 OopClosure* keep_alive, |
458 BoolObjectClosure* is_alive) |
494 BoolObjectClosure* is_alive) |
459 : _refs_list(refs_list), |
495 : _refs_list(refs_list), |
460 _prev_next(refs_list.head_ptr()), |
496 _prev_next(refs_list.adr_head()), |
461 _ref(refs_list.head()), |
497 _ref(refs_list.head()), |
462 #ifdef ASSERT |
498 #ifdef ASSERT |
463 _first_seen(refs_list.head()), |
499 _first_seen(refs_list.head()), |
464 #endif |
500 #endif |
465 #ifndef PRODUCT |
501 #ifndef PRODUCT |
469 _next(refs_list.head()), |
505 _next(refs_list.head()), |
470 _keep_alive(keep_alive), |
506 _keep_alive(keep_alive), |
471 _is_alive(is_alive) |
507 _is_alive(is_alive) |
472 { } |
508 { } |
473 |
509 |
474 inline bool DiscoveredListIterator::is_referent_alive() const |
510 inline bool DiscoveredListIterator::is_referent_alive() const { |
475 { |
|
476 return _is_alive->do_object_b(_referent); |
511 return _is_alive->do_object_b(_referent); |
477 } |
512 } |
478 |
513 |
479 inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) |
514 inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) { |
480 { |
|
481 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref); |
515 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref); |
482 assert(_discovered_addr && (*_discovered_addr)->is_oop_or_null(), |
516 oop discovered = java_lang_ref_Reference::discovered(_ref); |
|
517 assert(_discovered_addr && discovered->is_oop_or_null(), |
483 "discovered field is bad"); |
518 "discovered field is bad"); |
484 _next = *_discovered_addr; |
519 _next = discovered; |
485 _referent_addr = java_lang_ref_Reference::referent_addr(_ref); |
520 _referent_addr = java_lang_ref_Reference::referent_addr(_ref); |
486 _referent = *_referent_addr; |
521 _referent = java_lang_ref_Reference::referent(_ref); |
487 assert(Universe::heap()->is_in_reserved_or_null(_referent), |
522 assert(Universe::heap()->is_in_reserved_or_null(_referent), |
488 "Wrong oop found in java.lang.Reference object"); |
523 "Wrong oop found in java.lang.Reference object"); |
489 assert(allow_null_referent ? |
524 assert(allow_null_referent ? |
490 _referent->is_oop_or_null() |
525 _referent->is_oop_or_null() |
491 : _referent->is_oop(), |
526 : _referent->is_oop(), |
492 "bad referent"); |
527 "bad referent"); |
493 } |
528 } |
494 |
529 |
495 inline void DiscoveredListIterator::next() |
530 inline void DiscoveredListIterator::next() { |
496 { |
|
497 _prev_next = _discovered_addr; |
531 _prev_next = _discovered_addr; |
498 move_to_next(); |
532 move_to_next(); |
499 } |
533 } |
500 |
534 |
501 inline void DiscoveredListIterator::remove() |
535 inline void DiscoveredListIterator::remove() { |
502 { |
|
503 assert(_ref->is_oop(), "Dropping a bad reference"); |
536 assert(_ref->is_oop(), "Dropping a bad reference"); |
504 // Clear the discovered_addr field so that the object does |
537 oop_store_raw(_discovered_addr, NULL); |
505 // not look like it has been discovered. |
538 // First _prev_next ref actually points into DiscoveredList (gross). |
506 *_discovered_addr = NULL; |
539 if (UseCompressedOops) { |
507 // Remove Reference object from list. |
540 // Remove Reference object from list. |
508 *_prev_next = _next; |
541 oopDesc::encode_store_heap_oop_not_null((narrowOop*)_prev_next, _next); |
|
542 } else { |
|
543 // Remove Reference object from list. |
|
544 oopDesc::store_heap_oop((oop*)_prev_next, _next); |
|
545 } |
509 NOT_PRODUCT(_removed++); |
546 NOT_PRODUCT(_removed++); |
510 move_to_next(); |
547 move_to_next(); |
511 } |
548 } |
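
DiscoveredListIterator::remove() above unlinks the current Reference with a single store because _prev_next always points at the slot that reaches it: either the list head or the previous element's discovered field (which is why the new code must pick the right store width for that slot). The pointer-to-pointer idiom in isolation, using a toy Ref type rather than real oops:

#include <cstdio>

struct Ref { Ref* discovered; int id; };

// Remove every Ref with an odd id from a NULL-terminated list.
static void drop_odd(Ref** head) {
  Ref** prev_next = head;              // the slot that currently points at 'cur'
  for (Ref* cur = *head; cur != NULL; ) {
    Ref* next = cur->discovered;
    if (cur->id % 2 != 0) {
      cur->discovered = NULL;          // clear it so it no longer looks discovered
      *prev_next = next;               // unlink: the previous slot now skips 'cur'
    } else {
      prev_next = &cur->discovered;    // keep 'cur'; it owns the next slot
    }
    cur = next;
  }
}

int main() {
  Ref c = { NULL, 3 };
  Ref b = { &c,   2 };
  Ref a = { &b,   1 };
  Ref* head = &a;
  drop_odd(&head);
  for (Ref* r = head; r != NULL; r = r->discovered) printf("%d ", r->id);  // prints: 2
  printf("\n");
  return 0;
}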
512 |
549 |
513 inline void DiscoveredListIterator::move_to_next() |
550 inline void DiscoveredListIterator::move_to_next() { |
514 { |
|
515 _ref = _next; |
551 _ref = _next; |
516 assert(_ref != _first_seen, "cyclic ref_list found"); |
552 assert(_ref != _first_seen, "cyclic ref_list found"); |
517 NOT_PRODUCT(_processed++); |
553 NOT_PRODUCT(_processed++); |
518 } |
554 } |
519 |
|
520 |
555 |
521 // NOTE: process_phase*() are largely similar, and at a high level |
556 // NOTE: process_phase*() are largely similar, and at a high level |
522 // merely iterate over the extant list applying a predicate to |
557 // merely iterate over the extant list applying a predicate to |
523 // each of its elements and possibly removing that element from the |
558 // each of its elements and possibly removing that element from the |
524 // list and applying some further closures to that element. |
559 // list and applying some further closures to that element. |
529 |
564 |
530 // (SoftReferences only) Traverse the list and remove any SoftReferences whose |
565 // (SoftReferences only) Traverse the list and remove any SoftReferences whose |
531 // referents are not alive, but that should be kept alive for policy reasons. |
566 // referents are not alive, but that should be kept alive for policy reasons. |
532 // Keep alive the transitive closure of all such referents. |
567 // Keep alive the transitive closure of all such referents. |
533 void |
568 void |
534 ReferenceProcessor::process_phase1(DiscoveredList& refs_list_addr, |
569 ReferenceProcessor::process_phase1(DiscoveredList& refs_list, |
535 ReferencePolicy* policy, |
570 ReferencePolicy* policy, |
536 BoolObjectClosure* is_alive, |
571 BoolObjectClosure* is_alive, |
537 OopClosure* keep_alive, |
572 OopClosure* keep_alive, |
538 VoidClosure* complete_gc) { |
573 VoidClosure* complete_gc) { |
539 assert(policy != NULL, "Must have a non-NULL policy"); |
574 assert(policy != NULL, "Must have a non-NULL policy"); |
540 DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive); |
575 DiscoveredListIterator iter(refs_list, keep_alive, is_alive); |
541 // Decide which softly reachable refs should be kept alive. |
576 // Decide which softly reachable refs should be kept alive. |
542 while (iter.has_next()) { |
577 while (iter.has_next()) { |
543 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */)); |
578 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */)); |
544 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive(); |
579 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive(); |
545 if (referent_is_dead && !policy->should_clear_reference(iter.obj())) { |
580 if (referent_is_dead && !policy->should_clear_reference(iter.obj())) { |
546 if (TraceReferenceGC) { |
581 if (TraceReferenceGC) { |
547 gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy", |
582 gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy", |
548 (address)iter.obj(), iter.obj()->blueprint()->internal_name()); |
583 iter.obj(), iter.obj()->blueprint()->internal_name()); |
549 } |
584 } |
550 // Make the Reference object active again |
585 // Make the Reference object active again |
551 iter.make_active(); |
586 iter.make_active(); |
552 // keep the referent around |
587 // keep the referent around |
553 iter.make_referent_alive(); |
588 iter.make_referent_alive(); |
568 } |
603 } |
569 |
604 |
570 // Traverse the list and remove any Refs that are not active, or |
605 // Traverse the list and remove any Refs that are not active, or |
571 // whose referents are either alive or NULL. |
606 // whose referents are either alive or NULL. |
572 void |
607 void |
573 ReferenceProcessor::pp2_work(DiscoveredList& refs_list_addr, |
608 ReferenceProcessor::pp2_work(DiscoveredList& refs_list, |
574 BoolObjectClosure* is_alive, |
609 BoolObjectClosure* is_alive, |
575 OopClosure* keep_alive) |
610 OopClosure* keep_alive) { |
576 { |
|
577 assert(discovery_is_atomic(), "Error"); |
611 assert(discovery_is_atomic(), "Error"); |
578 DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive); |
612 DiscoveredListIterator iter(refs_list, keep_alive, is_alive); |
579 while (iter.has_next()) { |
613 while (iter.has_next()) { |
580 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */)); |
614 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */)); |
581 DEBUG_ONLY(oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj());) |
615 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());) |
582 assert(*next_addr == NULL, "Should not discover inactive Reference"); |
616 assert(next == NULL, "Should not discover inactive Reference"); |
583 if (iter.is_referent_alive()) { |
617 if (iter.is_referent_alive()) { |
584 if (TraceReferenceGC) { |
618 if (TraceReferenceGC) { |
585 gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)", |
619 gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)", |
586 (address)iter.obj(), iter.obj()->blueprint()->internal_name()); |
620 iter.obj(), iter.obj()->blueprint()->internal_name()); |
587 } |
621 } |
588 // The referent is reachable after all. |
622 // The referent is reachable after all. |
589 // Update the referent pointer as necessary: Note that this |
623 // Update the referent pointer as necessary: Note that this |
590 // should not entail any recursive marking because the |
624 // should not entail any recursive marking because the |
591 // referent must already have been traversed. |
625 // referent must already have been traversed. |
603 } |
637 } |
604 ) |
638 ) |
605 } |
639 } |
606 |
640 |
607 void |
641 void |
608 ReferenceProcessor::pp2_work_concurrent_discovery( |
642 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list, |
609 DiscoveredList& refs_list_addr, |
643 BoolObjectClosure* is_alive, |
610 BoolObjectClosure* is_alive, |
644 OopClosure* keep_alive, |
611 OopClosure* keep_alive, |
645 VoidClosure* complete_gc) { |
612 VoidClosure* complete_gc) |
|
613 { |
|
614 assert(!discovery_is_atomic(), "Error"); |
646 assert(!discovery_is_atomic(), "Error"); |
615 DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive); |
647 DiscoveredListIterator iter(refs_list, keep_alive, is_alive); |
616 while (iter.has_next()) { |
648 while (iter.has_next()) { |
617 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */)); |
649 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */)); |
618 oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj()); |
650 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj()); |
|
651 oop next = java_lang_ref_Reference::next(iter.obj()); |
619 if ((iter.referent() == NULL || iter.is_referent_alive() || |
652 if ((iter.referent() == NULL || iter.is_referent_alive() || |
620 *next_addr != NULL)) { |
653 next != NULL)) { |
621 assert((*next_addr)->is_oop_or_null(), "bad next field"); |
654 assert(next->is_oop_or_null(), "bad next field"); |
622 // Remove Reference object from list |
655 // Remove Reference object from list |
623 iter.remove(); |
656 iter.remove(); |
624 // Trace the cohorts |
657 // Trace the cohorts |
625 iter.make_referent_alive(); |
658 iter.make_referent_alive(); |
626 keep_alive->do_oop(next_addr); |
659 if (UseCompressedOops) { |
|
660 keep_alive->do_oop((narrowOop*)next_addr); |
|
661 } else { |
|
662 keep_alive->do_oop((oop*)next_addr); |
|
663 } |
627 } else { |
664 } else { |
628 iter.next(); |
665 iter.next(); |
629 } |
666 } |
630 } |
667 } |
631 // Now close the newly reachable set |
668 // Now close the newly reachable set |
637 } |
674 } |
638 ) |
675 ) |
639 } |
676 } |
640 |
677 |
641 // Traverse the list and process the referents, by either |
678 // Traverse the list and process the referents, by either |
642 // either clearing them or keeping them (and their reachable |
679 // clearing them or keeping them (and their reachable |
643 // closure) alive. |
680 // closure) alive. |
644 void |
681 void |
645 ReferenceProcessor::process_phase3(DiscoveredList& refs_list_addr, |
682 ReferenceProcessor::process_phase3(DiscoveredList& refs_list, |
646 bool clear_referent, |
683 bool clear_referent, |
647 BoolObjectClosure* is_alive, |
684 BoolObjectClosure* is_alive, |
648 OopClosure* keep_alive, |
685 OopClosure* keep_alive, |
649 VoidClosure* complete_gc) { |
686 VoidClosure* complete_gc) { |
650 DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive); |
687 DiscoveredListIterator iter(refs_list, keep_alive, is_alive); |
651 while (iter.has_next()) { |
688 while (iter.has_next()) { |
652 iter.update_discovered(); |
689 iter.update_discovered(); |
653 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */)); |
690 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */)); |
654 if (clear_referent) { |
691 if (clear_referent) { |
655 // NULL out referent pointer |
692 // NULL out referent pointer |
659 iter.make_referent_alive(); |
696 iter.make_referent_alive(); |
660 } |
697 } |
661 if (TraceReferenceGC) { |
698 if (TraceReferenceGC) { |
662 gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending", |
699 gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending", |
663 clear_referent ? "cleared " : "", |
700 clear_referent ? "cleared " : "", |
664 (address)iter.obj(), iter.obj()->blueprint()->internal_name()); |
701 iter.obj(), iter.obj()->blueprint()->internal_name()); |
665 } |
702 } |
666 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference"); |
703 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference"); |
667 // If discovery is concurrent, we may have objects with null referents, |
704 // If discovery is concurrent, we may have objects with null referents, |
668 // being those that were concurrently cleared after they were discovered |
705 // being those that were concurrently cleared after they were discovered |
669 // (and not subsequently precleaned). |
706 // (and not subsequently precleaned). |
677 // Close the reachable set |
714 // Close the reachable set |
678 complete_gc->do_void(); |
715 complete_gc->do_void(); |
679 } |
716 } |
680 |
717 |
681 void |
718 void |
682 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& ref_list) { |
719 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) { |
683 oop obj = ref_list.head(); |
720 oop obj = refs_list.head(); |
684 while (obj != _sentinelRef) { |
721 while (obj != sentinel_ref()) { |
685 oop* discovered_addr = java_lang_ref_Reference::discovered_addr(obj); |
722 oop discovered = java_lang_ref_Reference::discovered(obj); |
686 obj = *discovered_addr; |
723 java_lang_ref_Reference::set_discovered_raw(obj, NULL); |
687 *discovered_addr = NULL; |
724 obj = discovered; |
688 } |
725 } |
689 ref_list.set_head(_sentinelRef); |
726 refs_list.set_head(sentinel_ref()); |
690 ref_list.set_length(0); |
727 refs_list.set_length(0); |
691 } |
728 } |
692 |
729 |
693 void |
730 void |
694 ReferenceProcessor::abandon_partial_discovered_list_arr(DiscoveredList refs_lists[]) { |
731 ReferenceProcessor::abandon_partial_discovered_list_arr(DiscoveredList refs_lists[]) { |
695 for (int i = 0; i < _num_q; i++) { |
732 for (int i = 0; i < _num_q; i++) { |
775 oop move_tail = move_head; |
812 oop move_tail = move_head; |
776 oop new_head = move_head; |
813 oop new_head = move_head; |
777 // find an element to split the list on |
814 // find an element to split the list on |
778 for (size_t j = 0; j < refs_to_move; ++j) { |
815 for (size_t j = 0; j < refs_to_move; ++j) { |
779 move_tail = new_head; |
816 move_tail = new_head; |
780 new_head = *java_lang_ref_Reference::discovered_addr(new_head); |
817 new_head = java_lang_ref_Reference::discovered(new_head); |
781 } |
818 } |
782 java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head()); |
819 java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head()); |
783 ref_lists[to_idx].set_head(move_head); |
820 ref_lists[to_idx].set_head(move_head); |
784 ref_lists[to_idx].set_length(ref_lists[to_idx].length() + refs_to_move); |
821 ref_lists[to_idx].set_length(ref_lists[to_idx].length() + refs_to_move); |
785 ref_lists[from_idx].set_head(new_head); |
822 ref_lists[from_idx].set_head(new_head); |
873 assert(!discovery_is_atomic(), "Else why call this method?"); |
910 assert(!discovery_is_atomic(), "Else why call this method?"); |
874 DiscoveredListIterator iter(refs_list, NULL, NULL); |
911 DiscoveredListIterator iter(refs_list, NULL, NULL); |
875 size_t length = refs_list.length(); |
912 size_t length = refs_list.length(); |
876 while (iter.has_next()) { |
913 while (iter.has_next()) { |
877 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */)); |
914 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */)); |
878 oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj()); |
915 oop next = java_lang_ref_Reference::next(iter.obj()); |
879 assert((*next_addr)->is_oop_or_null(), "bad next field"); |
916 assert(next->is_oop_or_null(), "bad next field"); |
880 // If referent has been cleared or Reference is not active, |
917 // If referent has been cleared or Reference is not active, |
881 // drop it. |
918 // drop it. |
882 if (iter.referent() == NULL || *next_addr != NULL) { |
919 if (iter.referent() == NULL || next != NULL) { |
883 debug_only( |
920 debug_only( |
884 if (PrintGCDetails && TraceReferenceGC) { |
921 if (PrintGCDetails && TraceReferenceGC) { |
885 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: " |
922 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: " |
886 INTPTR_FORMAT " with next field: " INTPTR_FORMAT |
923 INTPTR_FORMAT " with next field: " INTPTR_FORMAT |
887 " and referent: " INTPTR_FORMAT, |
924 " and referent: " INTPTR_FORMAT, |
888 (address)iter.obj(), (address)*next_addr, (address)iter.referent()); |
925 iter.obj(), next, iter.referent()); |
889 } |
926 } |
890 ) |
927 ) |
891 // Remove Reference object from list |
928 // Remove Reference object from list |
892 iter.remove(); |
929 iter.remove(); |
893 --length; |
930 --length; |
948 ShouldNotReachHere(); |
985 ShouldNotReachHere(); |
949 } |
986 } |
950 return list; |
987 return list; |
951 } |
988 } |
952 |
989 |
953 inline void ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& list, |
990 inline void |
954 oop obj, oop* discovered_addr) { |
991 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list, |
|
992 oop obj, |
|
993 HeapWord* discovered_addr) { |
955 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller"); |
994 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller"); |
956 // First we must make sure this object is only enqueued once. CAS in a non null |
995 // First we must make sure this object is only enqueued once. CAS in a non null |
957 // discovered_addr. |
996 // discovered_addr. |
958 oop retest = (oop)Atomic::cmpxchg_ptr(list.head(), discovered_addr, NULL); |
997 oop retest = oopDesc::atomic_compare_exchange_oop(refs_list.head(), discovered_addr, |
|
998 NULL); |
959 if (retest == NULL) { |
999 if (retest == NULL) { |
960 // This thread just won the right to enqueue the object. |
1000 // This thread just won the right to enqueue the object. |
961 // We have separate lists for enqueueing so no synchronization |
1001 // We have separate lists for enqueueing so no synchronization |
962 // is necessary. |
1002 // is necessary. |
963 list.set_head(obj); |
1003 refs_list.set_head(obj); |
964 list.set_length(list.length() + 1); |
1004 refs_list.set_length(refs_list.length() + 1); |
965 } else { |
1005 } else { |
966 // If retest was non NULL, another thread beat us to it: |
1006 // If retest was non NULL, another thread beat us to it: |
967 // The reference has already been discovered... |
1007 // The reference has already been discovered... |
968 if (TraceReferenceGC) { |
1008 if (TraceReferenceGC) { |
969 gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)", |
1009 gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)", |
970 obj, obj->blueprint()->internal_name()); |
1010 obj, obj->blueprint()->internal_name()); |
971 } |
1011 } |
972 } |
1012 } |
973 } |
1013 } |
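
add_to_discovered_list_mt above claims a Reference for exactly one thread by compare-and-swapping the (always non-NULL) list head into the object's NULL discovered field; only the winner then links the object onto its own, unsynchronized list. A sketch of that claim-by-CAS idiom using std::atomic purely as an analogy; it is not the HotSpot Atomic/oopDesc API, and the sentinel here is an invented stand-in:

#include <atomic>
#include <cstdio>

struct Node {
  std::atomic<Node*> discovered;
  Node() : discovered(NULL) { }  // NULL means "not discovered by anyone yet"
};

static Node list_sentinel;       // stand-in for the _sentinelRef list terminator

struct List {
  Node*  head;
  size_t len;
  List() : head(&list_sentinel), len(0) { }  // per-thread list, so no locking on head
};

// Returns true if this call won the race to discover 'obj'.
static bool try_discover(List& list, Node& obj) {
  Node* expected = NULL;
  // Only one thread can swing 'discovered' from NULL to the (non-NULL) list head.
  if (obj.discovered.compare_exchange_strong(expected, list.head)) {
    list.head = &obj;  // we own the object now; link it at the front
    list.len++;
    return true;
  }
  return false;        // another thread discovered this reference first
}

int main() {
  List l;
  Node a, b;
  printf("%d\n", try_discover(l, a) ? 1 : 0);  // 1
  printf("%d\n", try_discover(l, b) ? 1 : 0);  // 1
  printf("%d\n", try_discover(l, a) ? 1 : 0);  // 0: 'a' was already claimed
  return 0;
}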
974 |
|
975 |
1014 |
976 // We mention two of several possible choices here: |
1015 // We mention two of several possible choices here: |
977 // #0: if the reference object is not in the "originating generation" |
1016 // #0: if the reference object is not in the "originating generation" |
978 // (or part of the heap being collected, indicated by our "span") |
1017 // (or part of the heap being collected, indicated by our "span") |
979 // we don't treat it specially (i.e. we scan it as we would |
1018 // we don't treat it specially (i.e. we scan it as we would |
1032 if (is_alive_non_header()->do_object_b(referent)) { |
1071 if (is_alive_non_header()->do_object_b(referent)) { |
1033 return false; // referent is reachable |
1072 return false; // referent is reachable |
1034 } |
1073 } |
1035 } |
1074 } |
1036 |
1075 |
1037 oop* discovered_addr = java_lang_ref_Reference::discovered_addr(obj); |
1076 HeapWord* discovered_addr = java_lang_ref_Reference::discovered_addr(obj); |
1038 assert(discovered_addr != NULL && (*discovered_addr)->is_oop_or_null(), |
1077 oop discovered = java_lang_ref_Reference::discovered(obj); |
1039 "bad discovered field"); |
1078 assert(discovered->is_oop_or_null(), "bad discovered field"); |
1040 if (*discovered_addr != NULL) { |
1079 if (discovered != NULL) { |
1041 // The reference has already been discovered... |
1080 // The reference has already been discovered... |
1042 if (TraceReferenceGC) { |
1081 if (TraceReferenceGC) { |
1043 gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)", |
1082 gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)", |
1044 (oopDesc*)obj, obj->blueprint()->internal_name()); |
1083 obj, obj->blueprint()->internal_name()); |
1045 } |
1084 } |
1046 if (RefDiscoveryPolicy == ReferentBasedDiscovery) { |
1085 if (RefDiscoveryPolicy == ReferentBasedDiscovery) { |
1047 // assumes that an object is not processed twice; |
1086 // assumes that an object is not processed twice; |
1048 // if it's been already discovered it must be on another |
1087 // if it's been already discovered it must be on another |
1049 // generation's discovered list; so we won't discover it. |
1088 // generation's discovered list; so we won't discover it. |
1086 // We do a raw store here, the field will be visited later when |
1125 // We do a raw store here, the field will be visited later when |
1087 // processing the discovered references. |
1126 // processing the discovered references. |
1088 if (_discovery_is_mt) { |
1127 if (_discovery_is_mt) { |
1089 add_to_discovered_list_mt(*list, obj, discovered_addr); |
1128 add_to_discovered_list_mt(*list, obj, discovered_addr); |
1090 } else { |
1129 } else { |
1091 *discovered_addr = list->head(); |
1130 oop_store_raw(discovered_addr, list->head()); |
1092 list->set_head(obj); |
1131 list->set_head(obj); |
1093 list->set_length(list->length() + 1); |
1132 list->set_length(list->length() + 1); |
1094 } |
1133 } |
1095 |
1134 |
1096 // In the MT discovery case, it is currently possible to see |
1135 // In the MT discovery case, it is currently possible to see |
1104 // only used for debugging support. |
1143 // only used for debugging support. |
1105 if (TraceReferenceGC) { |
1144 if (TraceReferenceGC) { |
1106 oop referent = java_lang_ref_Reference::referent(obj); |
1145 oop referent = java_lang_ref_Reference::referent(obj); |
1107 if (PrintGCDetails) { |
1146 if (PrintGCDetails) { |
1108 gclog_or_tty->print_cr("Enqueued reference (" INTPTR_FORMAT ": %s)", |
1147 gclog_or_tty->print_cr("Enqueued reference (" INTPTR_FORMAT ": %s)", |
1109 (oopDesc*) obj, obj->blueprint()->internal_name()); |
1148 obj, obj->blueprint()->internal_name()); |
1110 } |
1149 } |
1111 assert(referent->is_oop(), "Enqueued a bad referent"); |
1150 assert(referent->is_oop(), "Enqueued a bad referent"); |
1112 } |
1151 } |
1113 assert(obj->is_oop(), "Enqueued a bad reference"); |
1152 assert(obj->is_oop(), "Enqueued a bad reference"); |
1114 return true; |
1153 return true; |
1179 // Walk the given discovered ref list, and remove all reference objects |
1218 // Walk the given discovered ref list, and remove all reference objects |
1180 // whose referents are still alive, whose referents are NULL or which |
1219 // whose referents are still alive, whose referents are NULL or which |
1181 // are not active (have a non-NULL next field). NOTE: For this to work |
1220 // are not active (have a non-NULL next field). NOTE: For this to work |
1182 // correctly, refs discovery can not be happening concurrently with this |
1221 // correctly, refs discovery can not be happening concurrently with this |
1183 // step. |
1222 // step. |
1184 void ReferenceProcessor::preclean_discovered_reflist( |
1223 void |
1185 DiscoveredList& refs_list, BoolObjectClosure* is_alive, |
1224 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list, |
1186 OopClosure* keep_alive, VoidClosure* complete_gc, YieldClosure* yield) { |
1225 BoolObjectClosure* is_alive, |
1187 |
1226 OopClosure* keep_alive, |
|
1227 VoidClosure* complete_gc, |
|
1228 YieldClosure* yield) { |
1188 DiscoveredListIterator iter(refs_list, keep_alive, is_alive); |
1229 DiscoveredListIterator iter(refs_list, keep_alive, is_alive); |
1189 size_t length = refs_list.length(); |
1230 size_t length = refs_list.length(); |
1190 while (iter.has_next()) { |
1231 while (iter.has_next()) { |
1191 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */)); |
1232 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */)); |
1192 oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj()); |
1233 oop obj = iter.obj(); |
|
1234 oop next = java_lang_ref_Reference::next(obj); |
1193 if (iter.referent() == NULL || iter.is_referent_alive() || |
1235 if (iter.referent() == NULL || iter.is_referent_alive() || |
1194 *next_addr != NULL) { |
1236 next != NULL) { |
1195 // The referent has been cleared, or is alive, or the Reference is not |
1237 // The referent has been cleared, or is alive, or the Reference is not |
1196 // active; we need to trace and mark its cohort. |
1238 // active; we need to trace and mark its cohort. |
1197 if (TraceReferenceGC) { |
1239 if (TraceReferenceGC) { |
1198 gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)", |
1240 gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)", |
1199 iter.obj(), iter.obj()->blueprint()->internal_name()); |
1241 iter.obj(), iter.obj()->blueprint()->internal_name()); |
1239 // empty for now |
1287 // empty for now |
1240 } |
1288 } |
1241 #endif |
1289 #endif |
1242 |
1290 |
1243 void ReferenceProcessor::verify() { |
1291 void ReferenceProcessor::verify() { |
1244 guarantee(_sentinelRef != NULL && _sentinelRef->is_oop(), "Lost _sentinelRef"); |
1292 guarantee(sentinel_ref() != NULL && sentinel_ref()->is_oop(), "Lost _sentinelRef"); |
1245 } |
1293 } |
1246 |
1294 |
1247 #ifndef PRODUCT |
1295 #ifndef PRODUCT |
1248 void ReferenceProcessor::clear_discovered_references() { |
1296 void ReferenceProcessor::clear_discovered_references() { |
1249 guarantee(!_discovering_refs, "Discovering refs?"); |
1297 guarantee(!_discovering_refs, "Discovering refs?"); |
1250 for (int i = 0; i < _num_q * subclasses_of_ref; i++) { |
1298 for (int i = 0; i < _num_q * subclasses_of_ref; i++) { |
1251 oop obj = _discoveredSoftRefs[i].head(); |
1299 oop obj = _discoveredSoftRefs[i].head(); |
1252 while (obj != _sentinelRef) { |
1300 while (obj != sentinel_ref()) { |
1253 oop next = java_lang_ref_Reference::discovered(obj); |
1301 oop next = java_lang_ref_Reference::discovered(obj); |
1254 java_lang_ref_Reference::set_discovered(obj, (oop) NULL); |
1302 java_lang_ref_Reference::set_discovered(obj, (oop) NULL); |
1255 obj = next; |
1303 obj = next; |
1256 } |
1304 } |
1257 _discoveredSoftRefs[i].set_head(_sentinelRef); |
1305 _discoveredSoftRefs[i].set_head(sentinel_ref()); |
1258 _discoveredSoftRefs[i].set_length(0); |
1306 _discoveredSoftRefs[i].set_length(0); |
1259 } |
1307 } |
1260 } |
1308 } |
1261 #endif // PRODUCT |
1309 #endif // PRODUCT |