|
1 /* |
|
2 /* |
|
3 * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved. |
|
4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
|
5 * |
|
6 * This code is free software; you can redistribute it and/or modify it |
|
7 * under the terms of the GNU General Public License version 2 only, as |
|
8 * published by the Free Software Foundation. |
|
9 * |
|
10 * This code is distributed in the hope that it will be useful, but WITHOUT |
|
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
|
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
|
13 * version 2 for more details (a copy is included in the LICENSE file that |
|
14 * accompanied this code). |
|
15 * |
|
16 * You should have received a copy of the GNU General Public License version |
|
17 * 2 along with this work; if not, write to the Free Software Foundation, |
|
18 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. |
|
19 * |
|
20 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
|
21 * or visit www.oracle.com if you need additional information or have any |
|
22 * questions. |
|
23 * |
|
24 */ |
|
25 |
|
26 #ifndef SHARE_VM_OOPS_INSTANCEREFKLASS_INLINE_HPP |
|
27 #define SHARE_VM_OOPS_INSTANCEREFKLASS_INLINE_HPP |
|
28 |
|
29 #include "classfile/javaClasses.hpp" |
|
30 #include "memory/referenceProcessor.hpp" |
|
31 #include "oops/instanceRefKlass.hpp" |
|
32 #include "oops/instanceKlass.inline.hpp" |
|
33 #include "oops/oop.inline.hpp" |
|
34 #include "utilities/debug.hpp" |
|
35 #include "utilities/globalDefinitions.hpp" |
|
36 #include "utilities/macros.hpp" |
|
37 |
|
38 template <bool nv, typename T, class OopClosureType, class Contains> |
|
39 void InstanceRefKlass::oop_oop_iterate_ref_processing_specialized(oop obj, OopClosureType* closure, Contains& contains) { |
|
40 T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj); |
|
41 if (closure->apply_to_weak_ref_discovered_field()) { |
|
42 Devirtualizer<nv>::do_oop(closure, disc_addr); |
|
43 } |
|
44 |
|
45 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj); |
|
46 T heap_oop = oopDesc::load_heap_oop(referent_addr); |
|
47 ReferenceProcessor* rp = closure->_ref_processor; |
|
48 if (!oopDesc::is_null(heap_oop)) { |
|
49 oop referent = oopDesc::decode_heap_oop_not_null(heap_oop); |
|
50 if (!referent->is_gc_marked() && (rp != NULL) && |
|
51 rp->discover_reference(obj, reference_type())) { |
|
52 return; |
|
53 } else if (contains(referent_addr)) { |
|
54 // treat referent as normal oop |
|
55 Devirtualizer<nv>::do_oop(closure, referent_addr); |
|
56 } |
|
57 } |
|
58 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj); |
|
59 if (ReferenceProcessor::pending_list_uses_discovered_field()) { |
|
60 T next_oop = oopDesc::load_heap_oop(next_addr); |
|
61 // Treat discovered as normal oop, if ref is not "active" (next non-NULL) |
|
62 if (!oopDesc::is_null(next_oop) && contains(disc_addr)) { |
|
63 // i.e. ref is not "active" |
|
64 debug_only( |
|
65 if(TraceReferenceGC && PrintGCDetails) { |
|
66 gclog_or_tty->print_cr(" Process discovered as normal " |
|
67 PTR_FORMAT, p2i(disc_addr)); |
|
68 } |
|
69 ) |
|
70 Devirtualizer<nv>::do_oop(closure, disc_addr); |
|
71 } |
|
72 } else { |
|
73 // In the case of older JDKs which do not use the discovered field for |
|
74 // the pending list, an inactive ref (next != NULL) must always have a |
|
75 // NULL discovered field. |
|
76 debug_only( |
|
77 T next_oop = oopDesc::load_heap_oop(next_addr); |
|
78 T disc_oop = oopDesc::load_heap_oop(disc_addr); |
|
79 assert(oopDesc::is_null(next_oop) || oopDesc::is_null(disc_oop), |
|
80 err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL" |
|
81 "discovered field", p2i(obj))); |
|
82 ) |
|
83 } |
|
84 // treat next as normal oop |
|
85 if (contains(next_addr)) { |
|
86 Devirtualizer<nv>::do_oop(closure, next_addr); |
|
87 } |
|
88 } |
|
89 |
|
// Filter object for the unbounded iteration variants below: reports every
// location as "contained", i.e. the closure is applied unconditionally.
class AlwaysContains {
 public:
  template <typename T> bool operator()(T*) const { return true; }
};
static AlwaysContains always_contains;
|
95 |
|
96 template <bool nv, class OopClosureType> |
|
97 void InstanceRefKlass::oop_oop_iterate_ref_processing(oop obj, OopClosureType* closure) { |
|
98 if (UseCompressedOops) { |
|
99 oop_oop_iterate_ref_processing_specialized<nv, narrowOop>(obj, closure, always_contains); |
|
100 } else { |
|
101 oop_oop_iterate_ref_processing_specialized<nv, oop>(obj, closure, always_contains); |
|
102 } |
|
103 } |
|
104 |
|
105 class MrContains { |
|
106 const MemRegion _mr; |
|
107 public: |
|
108 MrContains(MemRegion mr) : _mr(mr) {} |
|
109 template <typename T> bool operator()(T* p) const { return _mr.contains(p); } |
|
110 }; |
|
111 |
|
112 template <bool nv, class OopClosureType> |
|
113 void InstanceRefKlass::oop_oop_iterate_ref_processing_bounded(oop obj, OopClosureType* closure, MemRegion mr) { |
|
114 const MrContains contains(mr); |
|
115 if (UseCompressedOops) { |
|
116 oop_oop_iterate_ref_processing_specialized<nv, narrowOop>(obj, closure, contains); |
|
117 } else { |
|
118 oop_oop_iterate_ref_processing_specialized<nv, oop>(obj, closure, contains); |
|
119 } |
|
120 } |
|
121 |
|
122 template <bool nv, class OopClosureType> |
|
123 int InstanceRefKlass::oop_oop_iterate(oop obj, OopClosureType* closure) { |
|
124 // Get size before changing pointers |
|
125 int size = InstanceKlass::oop_oop_iterate<nv>(obj, closure); |
|
126 |
|
127 oop_oop_iterate_ref_processing<nv>(obj, closure); |
|
128 |
|
129 return size; |
|
130 } |
|
131 |
|
#if INCLUDE_ALL_GCS
// Reverse-order variant of oop_oop_iterate; only built when all GCs are
// included.  Returns the object's size in words.
template <bool nv, class OopClosureType>
int InstanceRefKlass::oop_oop_iterate_reverse(oop obj, OopClosureType* closure) {
  // Capture the size before iterating: the closure may update obj's pointers.
  const int obj_size = InstanceKlass::oop_oop_iterate_reverse<nv>(obj, closure);

  oop_oop_iterate_ref_processing<nv>(obj, closure);

  return obj_size;
}
#endif // INCLUDE_ALL_GCS
|
144 |
|
145 |
|
146 template <bool nv, class OopClosureType> |
|
147 int InstanceRefKlass::oop_oop_iterate_bounded(oop obj, OopClosureType* closure, MemRegion mr) { |
|
148 // Get size before changing pointers |
|
149 int size = InstanceKlass::oop_oop_iterate_bounded<nv>(obj, closure, mr); |
|
150 |
|
151 oop_oop_iterate_ref_processing_bounded<nv>(obj, closure, mr); |
|
152 |
|
153 return size; |
|
154 } |
|
155 |
|
// Macro to define InstanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures. Macros calling macros above for each oop size.

// Defines the plain iteration entry point (oop_oop_iterate##nv_suffix),
// forwarding to the oop_oop_iterate template above.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)              \
                                                                                      \
int InstanceRefKlass::oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {  \
  return oop_oop_iterate<nvs_to_bool(nv_suffix)>(obj, closure);                       \
}

#if INCLUDE_ALL_GCS
// Defines the backwards iteration entry point; only available when all GCs
// are built in (matches the #if guard around oop_oop_iterate_reverse).
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix)              \
                                                                                                \
int InstanceRefKlass::oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {  \
  return oop_oop_iterate_reverse<nvs_to_bool(nv_suffix)>(obj, closure);                         \
}
#else
// Without INCLUDE_ALL_GCS there is no backwards variant: expand to nothing.
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix)
#endif


// Defines the MemRegion-bounded entry point (oop_oop_iterate##nv_suffix##_m),
// forwarding to the oop_oop_iterate_bounded template above.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)                              \
                                                                                                        \
int InstanceRefKlass::oop_oop_iterate##nv_suffix##_m(oop obj, OopClosureType* closure, MemRegion mr) {  \
  return oop_oop_iterate_bounded<nvs_to_bool(nv_suffix)>(obj, closure, mr);                             \
}

// Convenience macro expanding all three entry points for one closure type.
#define ALL_INSTANCE_REF_KLASS_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)  \
  InstanceRefKlass_OOP_OOP_ITERATE_DEFN(          OopClosureType, nv_suffix)    \
  InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(        OopClosureType, nv_suffix)    \
  InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix)
|
186 |
|
187 |
|
188 #endif // SHARE_VM_OOPS_INSTANCEREFKLASS_INLINE_HPP |