#include "gc/shared/referencePolicy.hpp"
#include "gc/shared/referenceProcessor.inline.hpp"
#include "logging/log.hpp"
#include "memory/allocation.hpp"
#include "memory/resourceArea.hpp"
#include "oops/access.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/java.hpp"

40 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL; |
41 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL; |
41 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL; |
42 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL; |
292 assert(java_lang_ref_Reference::next(obj) == NULL, |
293 assert(java_lang_ref_Reference::next(obj) == NULL, |
293 "Reference not active; should not be discovered"); |
294 "Reference not active; should not be discovered"); |
294 // Self-loop next, so as to make Ref not active. |
295 // Self-loop next, so as to make Ref not active. |
295 java_lang_ref_Reference::set_next_raw(obj, obj); |
296 java_lang_ref_Reference::set_next_raw(obj, obj); |
296 if (next_d != obj) { |
297 if (next_d != obj) { |
297 oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(obj), next_d); |
298 HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(obj, java_lang_ref_Reference::discovered_offset, next_d); |
298 } else { |
299 } else { |
299 // This is the last object. |
300 // This is the last object. |
300 // Swap refs_list into pending list and set obj's |
301 // Swap refs_list into pending list and set obj's |
301 // discovered to what we read from the pending list. |
302 // discovered to what we read from the pending list. |
302 oop old = Universe::swap_reference_pending_list(refs_list.head()); |
303 oop old = Universe::swap_reference_pending_list(refs_list.head()); |
303 java_lang_ref_Reference::set_discovered_raw(obj, old); // old may be NULL |
304 HeapAccess<AS_NO_KEEPALIVE>::oop_store_at(obj, java_lang_ref_Reference::discovered_offset, old); |
304 oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(obj), old); |
|
305 } |
305 } |
306 } |
306 } |
307 } |
307 } |
308 |
308 |
309 // Parallel enqueue task |
309 // Parallel enqueue task |
380 p2i(_referent)); |
380 p2i(_referent)); |
381 } |
381 } |
382 |
382 |
383 void DiscoveredListIterator::remove() { |
383 void DiscoveredListIterator::remove() { |
384 assert(oopDesc::is_oop(_ref), "Dropping a bad reference"); |
384 assert(oopDesc::is_oop(_ref), "Dropping a bad reference"); |
385 oop_store_raw(_discovered_addr, NULL); |
385 RawAccess<>::oop_store(_discovered_addr, oop(NULL)); |
386 |
386 |
387 // First _prev_next ref actually points into DiscoveredList (gross). |
387 // First _prev_next ref actually points into DiscoveredList (gross). |
388 oop new_next; |
388 oop new_next; |
389 if (_next == _ref) { |
389 if (_next == _ref) { |
390 // At the end of the list, we should make _prev point to itself. |
390 // At the end of the list, we should make _prev point to itself. |
395 new_next = _next; |
395 new_next = _next; |
396 } |
396 } |
397 // Remove Reference object from discovered list. Note that G1 does not need a |
397 // Remove Reference object from discovered list. Note that G1 does not need a |
398 // pre-barrier here because we know the Reference has already been found/marked, |
398 // pre-barrier here because we know the Reference has already been found/marked, |
399 // that's how it ended up in the discovered list in the first place. |
399 // that's how it ended up in the discovered list in the first place. |
400 oop_store_raw(_prev_next, new_next); |
400 RawAccess<>::oop_store(_prev_next, new_next); |
401 NOT_PRODUCT(_removed++); |
401 NOT_PRODUCT(_removed++); |
402 _refs_list.dec_length(1); |
402 _refs_list.dec_length(1); |
403 } |
403 } |
404 |
404 |
405 void DiscoveredListIterator::clear_referent() { |
405 void DiscoveredListIterator::clear_referent() { |
406 oop_store_raw(_referent_addr, NULL); |
406 RawAccess<>::oop_store(_referent_addr, oop(NULL)); |
407 } |
407 } |
408 |
408 |
409 // NOTE: process_phase*() are largely similar, and at a high level |
409 // NOTE: process_phase*() are largely similar, and at a high level |
410 // merely iterate over the extant list applying a predicate to |
410 // merely iterate over the extant list applying a predicate to |
411 // each of its elements and possibly removing that element from the |
411 // each of its elements and possibly removing that element from the |
915 // discovered_addr. |
915 // discovered_addr. |
916 oop current_head = refs_list.head(); |
916 oop current_head = refs_list.head(); |
917 // The last ref must have its discovered field pointing to itself. |
917 // The last ref must have its discovered field pointing to itself. |
918 oop next_discovered = (current_head != NULL) ? current_head : obj; |
918 oop next_discovered = (current_head != NULL) ? current_head : obj; |
919 |
919 |
920 oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr, |
920 oop retest = RawAccess<>::oop_atomic_cmpxchg(next_discovered, discovered_addr, oop(NULL)); |
921 NULL); |
921 |
922 if (retest == NULL) { |
922 if (retest == NULL) { |
923 // This thread just won the right to enqueue the object. |
923 // This thread just won the right to enqueue the object. |
924 // We have separate lists for enqueueing, so no synchronization |
924 // We have separate lists for enqueueing, so no synchronization |
925 // is necessary. |
925 // is necessary. |
926 refs_list.set_head(obj); |
926 refs_list.set_head(obj); |
931 } else { |
931 } else { |
932 // If retest was non NULL, another thread beat us to it: |
932 // If retest was non NULL, another thread beat us to it: |
933 // The reference has already been discovered... |
933 // The reference has already been discovered... |
934 log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)", |
934 log_develop_trace(gc, ref)("Already discovered reference (" INTPTR_FORMAT ": %s)", |
935 p2i(obj), obj->klass()->internal_name()); |
935 p2i(obj), obj->klass()->internal_name()); |
936 } |
936 } |
937 } |
937 } |
938 |
938 |
939 #ifndef PRODUCT |
939 #ifndef PRODUCT |
940 // Non-atomic (i.e. concurrent) discovery might allow us |
940 // Non-atomic (i.e. concurrent) discovery might allow us |
941 // to observe j.l.References with NULL referents, being those |
941 // to observe j.l.References with NULL referents, being those |
942 // cleared concurrently by mutators during (or after) discovery. |
942 // cleared concurrently by mutators during (or after) discovery. |
1074 oop current_head = list->head(); |
1074 oop current_head = list->head(); |
1075 // The last ref must have its discovered field pointing to itself. |
1075 // The last ref must have its discovered field pointing to itself. |
1076 oop next_discovered = (current_head != NULL) ? current_head : obj; |
1076 oop next_discovered = (current_head != NULL) ? current_head : obj; |
1077 |
1077 |
1078 assert(discovered == NULL, "control point invariant"); |
1078 assert(discovered == NULL, "control point invariant"); |
1079 oop_store_raw(discovered_addr, next_discovered); |
1079 RawAccess<>::oop_store(discovered_addr, next_discovered); |
1080 list->set_head(obj); |
1080 list->set_head(obj); |
1081 list->inc_length(1); |
1081 list->inc_length(1); |
1082 |
1082 |
1083 log_develop_trace(gc, ref)("Discovered reference (" INTPTR_FORMAT ": %s)", p2i(obj), obj->klass()->internal_name()); |
1083 log_develop_trace(gc, ref)("Discovered reference (" INTPTR_FORMAT ": %s)", p2i(obj), obj->klass()->internal_name()); |
1084 } |
1084 } |