#include "runtime/java.hpp"
#include "runtime/jniHandles.hpp"

ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL;
-const int subclasses_of_ref = REF_PHANTOM - REF_OTHER;
bool ReferenceProcessor::_pending_list_uses_discovered_field = false;
-
-// List of discovered references.
-class DiscoveredList {
-public:
-  DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
-  oop head() const {
-    return UseCompressedOops ? oopDesc::decode_heap_oop(_compressed_head) :
-                               _oop_head;
-  }
-  HeapWord* adr_head() {
-    return UseCompressedOops ? (HeapWord*)&_compressed_head :
-                               (HeapWord*)&_oop_head;
-  }
-  void set_head(oop o) {
-    if (UseCompressedOops) {
-      // Must compress the head ptr.
-      _compressed_head = oopDesc::encode_heap_oop(o);
-    } else {
-      _oop_head = o;
-    }
-  }
-  bool   empty() const          { return head() == NULL; }
-  size_t length()               { return _len; }
-  void   set_length(size_t len) { _len = len; }
-  void   inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
-  void   dec_length(size_t dec) { _len -= dec; }
-private:
-  // Set value depending on UseCompressedOops. This could be a template class
-  // but then we have to fix all the instantiations and declarations that use this class.
-  oop       _oop_head;
-  narrowOop _compressed_head;
-  size_t    _len;
-};

void referenceProcessor_init() {
  ReferenceProcessor::init_statics();
}

[...]

  _span = span;
  _discovery_is_atomic = atomic_discovery;
  _discovery_is_mt     = mt_discovery;
  _num_q               = MAX2(1, mt_processing_degree);
  _max_num_q           = MAX2(_num_q, mt_discovery_degree);
-  _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref);
+  _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList,
+                                         _max_num_q * number_of_subclasses_of_ref());
  if (_discoveredSoftRefs == NULL) {
    vm_exit_during_initialization("Could not allocated RefProc Array");
  }
  _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
  _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
  _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
  // Initialized all entries to NULL
-  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
+  for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
    _discoveredSoftRefs[i].set_head(NULL);
    _discoveredSoftRefs[i].set_length(0);
  }
  // If we do barriers, cache a copy of the barrier set.
  if (discovered_list_needs_barrier) {
[...]
}

#ifndef PRODUCT
void ReferenceProcessor::verify_no_references_recorded() {
  guarantee(!_discovering_refs, "Discovering refs?");
-  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
-    guarantee(_discoveredSoftRefs[i].empty(),
+  for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
+    guarantee(_discoveredSoftRefs[i].is_empty(),
              "Found non-empty discovered list");
  }
}
#endif

void ReferenceProcessor::weak_oops_do(OopClosure* f) {
-  // Should this instead be
-  // for (int i = 0; i < subclasses_of_ref; i++_ {
-  //   for (int j = 0; j < _num_q; j++) {
-  //     int index = i * _max_num_q + j;
-  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
+  for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
    if (UseCompressedOops) {
      f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
    } else {
      f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
    }
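
Editorial note (not part of the change shown above): the loops that run to `_max_num_q * number_of_subclasses_of_ref()` work because all discovered lists live in one flat allocation, with the per-type sub-arrays starting at multiples of `_max_num_q` (see the `_discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q]` assignments). The stand-alone sketch below models that layout; the names and sizes are illustrative only.

    // Illustrative model of the flat DiscoveredList layout: one allocation,
    // with sub-array t for reference type t starting at index t * max_num_q.
    #include <cstdio>

    int main() {
      const int num_ref_types = 4;  // Soft, Weak, Final, Phantom
      const int max_num_q     = 3;  // stand-in for the discovery/processing degree

      for (int t = 0; t < num_ref_types; t++) {
        for (int q = 0; q < max_num_q; q++) {
          int index = t * max_num_q + q;  // the slot weak_oops_do() visits as
                                          // _discoveredSoftRefs[index]
          printf("type %d, queue %d -> flat index %d\n", t, q, index);
        }
      }
      return 0;
    }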
[...]

    // (n_queues) with which that ReferenceProcessor was created. That
    // is because of the "clever" way the discovered references lists were
    // allocated and are indexed into.
    assert(_n_queues == (int) _ref_processor.max_num_q(), "Different number not expected");
    for (int j = 0;
-         j < subclasses_of_ref;
+         j < ReferenceProcessor::number_of_subclasses_of_ref();
         j++, index += _n_queues) {
      _ref_processor.enqueue_discovered_reflist(
        _refs_lists[index], _pending_list_addr);
      _refs_lists[index].set_head(NULL);
      _refs_lists[index].set_length(0);
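
Editorial note (not part of the change): the "clever" indexing the comment refers to is that worker j starts at flat index j and advances by `_n_queues`, so it visits exactly one list of each reference type. A stand-alone sketch of that stride, with illustrative sizes:

    // Illustrative model of the enqueue striding: worker j handles indices
    // j, j + n_queues, j + 2*n_queues, ... -- one list per reference type.
    #include <cstdio>

    int main() {
      const int n_queues = 3;  // stand-in for the ReferenceProcessor's max_num_q
      const int n_types  = 4;  // Soft, Weak, Final, Phantom
      for (int worker = 0; worker < n_queues; worker++) {
        int index = worker;
        for (int j = 0; j < n_types; j++, index += n_queues) {
          printf("worker %d enqueues _refs_lists[%d]\n", worker, index);
        }
      }
      return 0;
    }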
[...]

    RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
                           pending_list_addr, _max_num_q);
    task_executor->execute(tsk);
  } else {
    // Serial code: call the parent class's implementation
-    for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
+    for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
      enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
      _discoveredSoftRefs[i].set_head(NULL);
      _discoveredSoftRefs[i].set_length(0);
    }
  }
}

-// Iterator for the list of discovered references.
-class DiscoveredListIterator {
-public:
-  inline DiscoveredListIterator(DiscoveredList&    refs_list,
-                                OopClosure*        keep_alive,
-                                BoolObjectClosure* is_alive);
-
-  // End Of List.
-  inline bool has_next() const { return _ref != NULL; }
-
-  // Get oop to the Reference object.
-  inline oop obj() const { return _ref; }
-
-  // Get oop to the referent object.
-  inline oop referent() const { return _referent; }
-
-  // Returns true if referent is alive.
-  inline bool is_referent_alive() const;
-
-  // Loads data for the current reference.
-  // The "allow_null_referent" argument tells us to allow for the possibility
-  // of a NULL referent in the discovered Reference object. This typically
-  // happens in the case of concurrent collectors that may have done the
-  // discovery concurrently, or interleaved, with mutator execution.
-  inline void load_ptrs(DEBUG_ONLY(bool allow_null_referent));
-
-  // Move to the next discovered reference.
-  inline void next();
-
-  // Remove the current reference from the list
-  inline void remove();
-
-  // Make the Reference object active again.
-  inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }
-
-  // Make the referent alive.
-  inline void make_referent_alive() {
-    if (UseCompressedOops) {
-      _keep_alive->do_oop((narrowOop*)_referent_addr);
-    } else {
-      _keep_alive->do_oop((oop*)_referent_addr);
-    }
-  }
-
-  // Update the discovered field.
-  inline void update_discovered() {
-    // First _prev_next ref actually points into DiscoveredList (gross).
-    if (UseCompressedOops) {
-      if (!oopDesc::is_null(*(narrowOop*)_prev_next)) {
-        _keep_alive->do_oop((narrowOop*)_prev_next);
-      }
-    } else {
-      if (!oopDesc::is_null(*(oop*)_prev_next)) {
-        _keep_alive->do_oop((oop*)_prev_next);
-      }
-    }
-  }
-
-  // NULL out referent pointer.
-  inline void clear_referent() { oop_store_raw(_referent_addr, NULL); }
-
-  // Statistics
-  NOT_PRODUCT(
-  inline size_t processed() const { return _processed; }
-  inline size_t removed() const { return _removed; }
-  )
-
-  inline void move_to_next();
-
-private:
-  DiscoveredList&    _refs_list;
-  HeapWord*          _prev_next;
-  oop                _prev;
-  oop                _ref;
-  HeapWord*          _discovered_addr;
-  oop                _next;
-  HeapWord*          _referent_addr;
-  oop                _referent;
-  OopClosure*        _keep_alive;
-  BoolObjectClosure* _is_alive;
-  DEBUG_ONLY(
-  oop                _first_seen; // cyclic linked list check
-  )
-  NOT_PRODUCT(
-  size_t             _processed;
-  size_t             _removed;
-  )
-};
-
-inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList&    refs_list,
-                                                      OopClosure*        keep_alive,
-                                                      BoolObjectClosure* is_alive)
-  : _refs_list(refs_list),
-    _prev_next(refs_list.adr_head()),
-    _prev(NULL),
-    _ref(refs_list.head()),
-#ifdef ASSERT
-    _first_seen(refs_list.head()),
-#endif
-#ifndef PRODUCT
-    _processed(0),
-    _removed(0),
-#endif
-    _next(NULL),
-    _keep_alive(keep_alive),
-    _is_alive(is_alive)
-{ }
-
-inline bool DiscoveredListIterator::is_referent_alive() const {
-  return _is_alive->do_object_b(_referent);
-}
-
-inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
+void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
  _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
  oop discovered = java_lang_ref_Reference::discovered(_ref);
  assert(_discovered_addr && discovered->is_oop_or_null(),
         "discovered field is bad");
  _next = discovered;
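
Editorial note (not part of the change): `_next` is read from the Reference's own discovered field because a DiscoveredList is threaded through those fields, and, as the removed move_to_next() below shows, the last element links to itself to mark the end of the list. The stand-alone model below walks such a list the same way; the struct and names are made up for illustration.

    // Illustrative model of a discovered list threaded through the objects'
    // 'discovered' fields, terminated by a self-link on the last element.
    #include <cstdio>

    struct FakeRef {
      FakeRef*    discovered;  // stands in for java.lang.ref.Reference.discovered
      const char* name;
    };

    int main() {
      FakeRef c = { nullptr, "C" };
      FakeRef b = { &c,      "B" };
      FakeRef a = { &b,      "A" };
      c.discovered = &c;       // last element points to itself

      for (FakeRef* ref = &a; ref != nullptr; ) {
        FakeRef* next = ref->discovered;       // load_ptrs(): _next = discovered
        printf("visiting Reference %s\n", ref->name);
        ref = (next == ref) ? nullptr : next;  // move_to_next(): self-link ends the walk
      }
      return 0;
    }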
[...]

  }
  NOT_PRODUCT(_removed++);
  _refs_list.dec_length(1);
}

-inline void DiscoveredListIterator::move_to_next() {
-  if (_ref == _next) {
-    // End of the list.
-    _ref = NULL;
-  } else {
-    _ref = _next;
-  }
-  assert(_ref != _first_seen, "cyclic ref_list found");
-  NOT_PRODUCT(_processed++);
-}
+// Make the Reference object active again.
+void DiscoveredListIterator::make_active() {
+  // For G1 we don't want to use set_next - it
+  // will dirty the card for the next field of
+  // the reference object and will fail
+  // CT verification.
+  if (UseG1GC) {
+    BarrierSet* bs = oopDesc::bs();
+    HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
+
+    if (UseCompressedOops) {
+      bs->write_ref_field_pre((narrowOop*)next_addr, NULL);
+    } else {
+      bs->write_ref_field_pre((oop*)next_addr, NULL);
+    }
+    java_lang_ref_Reference::set_next_raw(_ref, NULL);
+  } else {
+    java_lang_ref_Reference::set_next(_ref, NULL);
+  }
+}
+
+void DiscoveredListIterator::clear_referent() {
+  oop_store_raw(_referent_addr, NULL);
+}
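
Editorial note (not part of the change): the new make_active() avoids java_lang_ref_Reference::set_next() under G1 because, as its comment says, that store would dirty the card for the Reference's next field and fail card-table (CT) verification; instead it issues only the pre-barrier and then clears the field with a raw store. A stand-alone model of that pattern, with the SATB queue faked by a vector:

    // Illustrative model: record the old value via the pre-barrier, then
    // clear the field with a raw store so no card is dirtied for it.
    #include <cstdio>
    #include <vector>

    struct FakeRef { const void* next; };

    static std::vector<const void*> satb_buffer;  // stand-in for G1's SATB queue

    static void write_ref_field_pre(const void** field) {
      if (*field != nullptr) {
        satb_buffer.push_back(*field);  // publish the value about to be overwritten
      }
    }

    static void make_active(FakeRef* r) {
      write_ref_field_pre(&r->next);    // pre-barrier only
      r->next = nullptr;                // raw store, no post-barrier / card dirtying
    }

    int main() {
      FakeRef target;
      FakeRef r = { &target };
      make_active(&r);
      printf("SATB entries recorded: %zu\n", satb_buffer.size());
      return 0;
    }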

// NOTE: process_phase*() are largely similar, and at a high level
// merely iterate over the extant list applying a predicate to
// each of its elements and possibly removing that element from the

[...]

  }
private:
  bool _clear_referent;
};

+void ReferenceProcessor::set_discovered(oop ref, oop value) {
+  if (_discovered_list_needs_barrier) {
+    java_lang_ref_Reference::set_discovered(ref, value);
+  } else {
+    java_lang_ref_Reference::set_discovered_raw(ref, value);
+  }
+}
+
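Editorial note (not part of the change): the new set_discovered() helper makes one decision in one place: when `_discovered_list_needs_barrier` is set, the Reference's discovered field is written through the barriered setter, otherwise through the raw one. The stand-alone sketch below mimics that dispatch and the kind of use it serves, prepending a Reference to a discovered list, assuming the self-link convention visible in move_to_next() above; all names here are made up.

    // Illustrative model of set_discovered(): barriered vs. raw store, used
    // here to prepend elements to a list linked through 'discovered'.
    #include <cstdio>

    struct FakeRef { FakeRef* discovered; };

    static bool discovered_list_needs_barrier = false;

    static void set_discovered(FakeRef* ref, FakeRef* value) {
      if (discovered_list_needs_barrier) {
        ref->discovered = value;  // VM: java_lang_ref_Reference::set_discovered()
      } else {
        ref->discovered = value;  // VM: java_lang_ref_Reference::set_discovered_raw()
      }
    }

    int main() {
      FakeRef a, b;
      FakeRef* head = nullptr;

      // An empty list is represented by the first element linking to itself.
      set_discovered(&a, head != nullptr ? head : &a);
      head = &a;

      set_discovered(&b, head != nullptr ? head : &b);
      head = &b;

      printf("head->discovered links back to a: %s\n",
             head->discovered == &a ? "yes" : "no");
      return 0;
    }
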
// Balances reference queues.
// Move entries from all queues[0, 1, ..., _max_num_q-1] to
// queues[0, 1, ..., _num_q-1] because only the first _num_q
// corresponding to the active workers will be processed.
void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])