23 */ |
23 */ |
24 |
24 |
25 #include "precompiled.hpp" |
25 #include "precompiled.hpp" |
26 #include "classfile/javaClasses.hpp" |
26 #include "classfile/javaClasses.hpp" |
27 #include "classfile/systemDictionary.hpp" |
27 #include "classfile/systemDictionary.hpp" |
28 #include "gc_implementation/shared/markSweep.inline.hpp" |
|
29 #include "gc_interface/collectedHeap.hpp" |
|
30 #include "gc_interface/collectedHeap.inline.hpp" |
28 #include "gc_interface/collectedHeap.inline.hpp" |
31 #include "memory/genCollectedHeap.hpp" |
29 #include "memory/genCollectedHeap.hpp" |
32 #include "memory/genOopClosures.inline.hpp" |
|
33 #include "memory/specialized_oop_closures.hpp" |
30 #include "memory/specialized_oop_closures.hpp" |
34 #include "oops/instanceRefKlass.hpp" |
31 #include "oops/instanceRefKlass.inline.hpp" |
35 #include "oops/oop.inline.hpp" |
32 #include "oops/oop.inline.hpp" |
36 #include "utilities/preserveException.hpp" |
33 #include "utilities/preserveException.hpp" |
37 #include "utilities/macros.hpp" |
34 #include "utilities/macros.hpp" |
38 #if INCLUDE_ALL_GCS |
|
39 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp" |
|
40 #include "gc_implementation/g1/g1OopClosures.inline.hpp" |
|
41 #include "gc_implementation/g1/g1RemSet.inline.hpp" |
|
42 #include "gc_implementation/g1/heapRegionManager.inline.hpp" |
|
43 #include "gc_implementation/parNew/parOopClosures.inline.hpp" |
|
44 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp" |
|
45 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp" |
|
46 #endif // INCLUDE_ALL_GCS |
|
47 |
35 |
48 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC |
36 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC |
49 |
|
50 template <class T> |
|
51 void specialized_oop_follow_contents(InstanceRefKlass* ref, oop obj) { |
|
52 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj); |
|
53 T heap_oop = oopDesc::load_heap_oop(referent_addr); |
|
54 debug_only( |
|
55 if(TraceReferenceGC && PrintGCDetails) { |
|
56 gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj); |
|
57 } |
|
58 ) |
|
59 if (!oopDesc::is_null(heap_oop)) { |
|
60 oop referent = oopDesc::decode_heap_oop_not_null(heap_oop); |
|
61 if (!referent->is_gc_marked() && |
|
62 MarkSweep::ref_processor()->discover_reference(obj, ref->reference_type())) { |
|
63 // reference was discovered, referent will be traversed later |
|
64 ref->InstanceKlass::oop_follow_contents(obj); |
|
65 debug_only( |
|
66 if(TraceReferenceGC && PrintGCDetails) { |
|
67 gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, (void *)obj); |
|
68 } |
|
69 ) |
|
70 return; |
|
71 } else { |
|
72 // treat referent as normal oop |
|
73 debug_only( |
|
74 if(TraceReferenceGC && PrintGCDetails) { |
|
75 gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, (void *)obj); |
|
76 } |
|
77 ) |
|
78 MarkSweep::mark_and_push(referent_addr); |
|
79 } |
|
80 } |
|
81 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj); |
|
82 if (ReferenceProcessor::pending_list_uses_discovered_field()) { |
|
83 // Treat discovered as normal oop, if ref is not "active", |
|
84 // i.e. if next is non-NULL. |
|
85 T next_oop = oopDesc::load_heap_oop(next_addr); |
|
86 if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active" |
|
87 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj); |
|
88 debug_only( |
|
89 if(TraceReferenceGC && PrintGCDetails) { |
|
90 gclog_or_tty->print_cr(" Process discovered as normal " |
|
91 INTPTR_FORMAT, discovered_addr); |
|
92 } |
|
93 ) |
|
94 MarkSweep::mark_and_push(discovered_addr); |
|
95 } |
|
96 } else { |
|
97 #ifdef ASSERT |
|
98 // In the case of older JDKs which do not use the discovered |
|
99 // field for the pending list, an inactive ref (next != NULL) |
|
100 // must always have a NULL discovered field. |
|
101 oop next = oopDesc::load_decode_heap_oop(next_addr); |
|
102 oop discovered = java_lang_ref_Reference::discovered(obj); |
|
103 assert(oopDesc::is_null(next) || oopDesc::is_null(discovered), |
|
104 err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field", |
|
105 (oopDesc*)obj)); |
|
106 #endif |
|
107 } |
|
108 // treat next as normal oop. next is a link in the reference queue. |
|
109 debug_only( |
|
110 if(TraceReferenceGC && PrintGCDetails) { |
|
111 gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr); |
|
112 } |
|
113 ) |
|
114 MarkSweep::mark_and_push(next_addr); |
|
115 ref->InstanceKlass::oop_follow_contents(obj); |
|
116 } |
|
117 |
|
118 void InstanceRefKlass::oop_follow_contents(oop obj) { |
|
119 if (UseCompressedOops) { |
|
120 specialized_oop_follow_contents<narrowOop>(this, obj); |
|
121 } else { |
|
122 specialized_oop_follow_contents<oop>(this, obj); |
|
123 } |
|
124 } |
|
125 |
|
126 #if INCLUDE_ALL_GCS |
|
127 template <class T> |
|
128 void specialized_oop_follow_contents(InstanceRefKlass* ref, |
|
129 ParCompactionManager* cm, |
|
130 oop obj) { |
|
131 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj); |
|
132 T heap_oop = oopDesc::load_heap_oop(referent_addr); |
|
133 debug_only( |
|
134 if(TraceReferenceGC && PrintGCDetails) { |
|
135 gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj); |
|
136 } |
|
137 ) |
|
138 if (!oopDesc::is_null(heap_oop)) { |
|
139 oop referent = oopDesc::decode_heap_oop_not_null(heap_oop); |
|
140 if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) && |
|
141 PSParallelCompact::ref_processor()-> |
|
142 discover_reference(obj, ref->reference_type())) { |
|
143 // reference already enqueued, referent will be traversed later |
|
144 ref->InstanceKlass::oop_follow_contents(cm, obj); |
|
145 debug_only( |
|
146 if(TraceReferenceGC && PrintGCDetails) { |
|
147 gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, (void *)obj); |
|
148 } |
|
149 ) |
|
150 return; |
|
151 } else { |
|
152 // treat referent as normal oop |
|
153 debug_only( |
|
154 if(TraceReferenceGC && PrintGCDetails) { |
|
155 gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, (void *)obj); |
|
156 } |
|
157 ) |
|
158 PSParallelCompact::mark_and_push(cm, referent_addr); |
|
159 } |
|
160 } |
|
161 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj); |
|
162 if (ReferenceProcessor::pending_list_uses_discovered_field()) { |
|
163 // Treat discovered as normal oop, if ref is not "active", |
|
164 // i.e. if next is non-NULL. |
|
165 T next_oop = oopDesc::load_heap_oop(next_addr); |
|
166 if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active" |
|
167 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj); |
|
168 debug_only( |
|
169 if(TraceReferenceGC && PrintGCDetails) { |
|
170 gclog_or_tty->print_cr(" Process discovered as normal " |
|
171 INTPTR_FORMAT, discovered_addr); |
|
172 } |
|
173 ) |
|
174 PSParallelCompact::mark_and_push(cm, discovered_addr); |
|
175 } |
|
176 } else { |
|
177 #ifdef ASSERT |
|
178 // In the case of older JDKs which do not use the discovered |
|
179 // field for the pending list, an inactive ref (next != NULL) |
|
180 // must always have a NULL discovered field. |
|
181 T next = oopDesc::load_heap_oop(next_addr); |
|
182 oop discovered = java_lang_ref_Reference::discovered(obj); |
|
183 assert(oopDesc::is_null(next) || oopDesc::is_null(discovered), |
|
184 err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field", |
|
185 (oopDesc*)obj)); |
|
186 #endif |
|
187 } |
|
188 PSParallelCompact::mark_and_push(cm, next_addr); |
|
189 ref->InstanceKlass::oop_follow_contents(cm, obj); |
|
190 } |
|
191 |
|
192 void InstanceRefKlass::oop_follow_contents(ParCompactionManager* cm, |
|
193 oop obj) { |
|
194 if (UseCompressedOops) { |
|
195 specialized_oop_follow_contents<narrowOop>(this, cm, obj); |
|
196 } else { |
|
197 specialized_oop_follow_contents<oop>(this, cm, obj); |
|
198 } |
|
199 } |
|
200 #endif // INCLUDE_ALL_GCS |
|
201 |
|
#ifdef ASSERT
// Debug-only tracing helper: prints obj plus the address and current
// value of each of the three java.lang.ref.Reference fields.
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if (!(TraceReferenceGC && PrintGCDetails)) {
    return;
  }
  // Decode each field up front; a NULL field address prints as NULL.
  address referent_val   = referent_addr ?
      (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL;
  address next_val       = next_addr ?
      (address)oopDesc::load_decode_heap_oop(next_addr) : NULL;
  address discovered_val = discovered_addr ?
      (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL;
  gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
  gclog_or_tty->print_cr(" referent_addr/* " INTPTR_FORMAT " / "
                         INTPTR_FORMAT, referent_addr, referent_val);
  gclog_or_tty->print_cr(" next_addr/* " INTPTR_FORMAT " / "
                         INTPTR_FORMAT, next_addr, next_val);
  gclog_or_tty->print_cr(" discovered_addr/* " INTPTR_FORMAT " / "
                         INTPTR_FORMAT, discovered_addr, discovered_val);
}
#endif
|
223 |
|
224 template <class T> void specialized_oop_adjust_pointers(InstanceRefKlass *ref, oop obj) { |
|
225 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj); |
|
226 MarkSweep::adjust_pointer(referent_addr); |
|
227 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj); |
|
228 MarkSweep::adjust_pointer(next_addr); |
|
229 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj); |
|
230 MarkSweep::adjust_pointer(discovered_addr); |
|
231 debug_only(trace_reference_gc("InstanceRefKlass::oop_adjust_pointers", obj, |
|
232 referent_addr, next_addr, discovered_addr);) |
|
233 } |
|
234 |
|
235 int InstanceRefKlass::oop_adjust_pointers(oop obj) { |
|
236 int size = size_helper(); |
|
237 InstanceKlass::oop_adjust_pointers(obj); |
|
238 |
|
239 if (UseCompressedOops) { |
|
240 specialized_oop_adjust_pointers<narrowOop>(this, obj); |
|
241 } else { |
|
242 specialized_oop_adjust_pointers<oop>(this, obj); |
|
243 } |
|
244 return size; |
|
245 } |
|
246 |
|
// Shared body for the InstanceRefKlass oop_oop_iterate variants.
// Expects `obj`, `closure` and `size` in scope at the expansion site;
// `contains` filters which field addresses are visited (always-true for
// the unbounded variants, mr.contains for the bounded one).
// Fix: the assert message previously concatenated "non-NULL" and
// "discovered field" without a separating space.
#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)      \
  T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);           \
  if (closure->apply_to_weak_ref_discovered_field()) {                        \
    closure->do_oop##nv_suffix(disc_addr);                                    \
  }                                                                           \
                                                                              \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);         \
  T heap_oop = oopDesc::load_heap_oop(referent_addr);                         \
  ReferenceProcessor* rp = closure->_ref_processor;                           \
  if (!oopDesc::is_null(heap_oop)) {                                          \
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);               \
    if (!referent->is_gc_marked() && (rp != NULL) &&                          \
        rp->discover_reference(obj, reference_type())) {                      \
      return size;                                                            \
    } else if (contains(referent_addr)) {                                     \
      /* treat referent as normal oop */                                      \
      closure->do_oop##nv_suffix(referent_addr);                              \
    }                                                                         \
  }                                                                           \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                 \
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {             \
    T next_oop = oopDesc::load_heap_oop(next_addr);                           \
    /* Treat discovered as normal oop, if ref is not "active" (next non-NULL) */\
    if (!oopDesc::is_null(next_oop) && contains(disc_addr)) {                 \
      /* i.e. ref is not "active" */                                          \
      debug_only(                                                             \
        if(TraceReferenceGC && PrintGCDetails) {                              \
          gclog_or_tty->print_cr(" Process discovered as normal "             \
                                 INTPTR_FORMAT, disc_addr);                   \
        }                                                                     \
      )                                                                       \
      closure->do_oop##nv_suffix(disc_addr);                                  \
    }                                                                         \
  } else {                                                                    \
    /* In the case of older JDKs which do not use the discovered field for */ \
    /* the pending list, an inactive ref (next != NULL) must always have a */ \
    /* NULL discovered field. */                                              \
    debug_only(                                                               \
      T next_oop = oopDesc::load_heap_oop(next_addr);                         \
      T disc_oop = oopDesc::load_heap_oop(disc_addr);                         \
      assert(oopDesc::is_null(next_oop) || oopDesc::is_null(disc_oop),        \
             err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL " \
                     "discovered field", (oopDesc*)obj));                     \
    )                                                                         \
  }                                                                           \
  /* treat next as normal oop */                                              \
  if (contains(next_addr)) {                                                  \
    closure->do_oop##nv_suffix(next_addr);                                    \
  }                                                                           \
  return size;                                                                \

|
298 |
|
// Address filter used by the unbounded iterate variants: every address
// is considered "in range".
template <class T> bool contains(T *t) { return true; }
|
300 |
|
301 // Macro to define InstanceRefKlass::oop_oop_iterate for virtual/nonvirtual for |
|
302 // all closures. Macros calling macros above for each oop size. |
|
303 |
|
// Defines InstanceRefKlass::oop_oop_iterate[_nv] for one closure type,
// delegating the Reference-field handling to the SPECIALIZED macro above.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)      \
                                                                              \
int InstanceRefKlass::                                                        \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                \
  /* Get size before changing pointers */                                     \
  int size = InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);         \
                                                                              \
  if (UseCompressedOops) {                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains); \
  } else {                                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);       \
  }                                                                           \
}
|
317 |
|
#if INCLUDE_ALL_GCS
// Defines the backwards-scan variant of oop_oop_iterate for one closure
// type (used only by collectors built under INCLUDE_ALL_GCS).
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                              \
int InstanceRefKlass::                                                        \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {      \
  /* Get size before changing pointers */                                     \
  int size = InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                              \
  if (UseCompressedOops) {                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains); \
  } else {                                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);       \
  }                                                                           \
}
#endif // INCLUDE_ALL_GCS
|
333 |
|
334 |
|
// Defines the bounded (MemRegion) variant of oop_oop_iterate for one
// closure type; only field addresses inside mr are visited.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)    \
                                                                              \
int InstanceRefKlass::                                                        \
oop_oop_iterate##nv_suffix##_m(oop obj,                                       \
                               OopClosureType* closure,                       \
                               MemRegion mr) {                                \
  int size = InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr); \
  if (UseCompressedOops) {                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
  } else {                                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);    \
  }                                                                           \
}
|
348 |
|
349 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN) |
|
350 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN) |
|
351 #if INCLUDE_ALL_GCS |
|
352 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN) |
|
353 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN) |
|
354 #endif // INCLUDE_ALL_GCS |
|
355 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m) |
|
356 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m) |
|
357 |
|
358 #if INCLUDE_ALL_GCS |
|
359 template <class T> |
|
360 void specialized_oop_push_contents(InstanceRefKlass *ref, |
|
361 PSPromotionManager* pm, oop obj) { |
|
362 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj); |
|
363 if (PSScavenge::should_scavenge(referent_addr)) { |
|
364 ReferenceProcessor* rp = PSScavenge::reference_processor(); |
|
365 if (rp->discover_reference(obj, ref->reference_type())) { |
|
366 // reference already enqueued, referent and next will be traversed later |
|
367 ref->InstanceKlass::oop_push_contents(pm, obj); |
|
368 return; |
|
369 } else { |
|
370 // treat referent as normal oop |
|
371 pm->claim_or_forward_depth(referent_addr); |
|
372 } |
|
373 } |
|
374 // Treat discovered as normal oop, if ref is not "active", |
|
375 // i.e. if next is non-NULL. |
|
376 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj); |
|
377 if (ReferenceProcessor::pending_list_uses_discovered_field()) { |
|
378 T next_oop = oopDesc::load_heap_oop(next_addr); |
|
379 if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active" |
|
380 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj); |
|
381 debug_only( |
|
382 if(TraceReferenceGC && PrintGCDetails) { |
|
383 gclog_or_tty->print_cr(" Process discovered as normal " |
|
384 INTPTR_FORMAT, discovered_addr); |
|
385 } |
|
386 ) |
|
387 if (PSScavenge::should_scavenge(discovered_addr)) { |
|
388 pm->claim_or_forward_depth(discovered_addr); |
|
389 } |
|
390 } |
|
391 } else { |
|
392 #ifdef ASSERT |
|
393 // In the case of older JDKs which do not use the discovered |
|
394 // field for the pending list, an inactive ref (next != NULL) |
|
395 // must always have a NULL discovered field. |
|
396 oop next = oopDesc::load_decode_heap_oop(next_addr); |
|
397 oop discovered = java_lang_ref_Reference::discovered(obj); |
|
398 assert(oopDesc::is_null(next) || oopDesc::is_null(discovered), |
|
399 err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field", |
|
400 (oopDesc*)obj)); |
|
401 #endif |
|
402 } |
|
403 |
|
404 // Treat next as normal oop; next is a link in the reference queue. |
|
405 if (PSScavenge::should_scavenge(next_addr)) { |
|
406 pm->claim_or_forward_depth(next_addr); |
|
407 } |
|
408 ref->InstanceKlass::oop_push_contents(pm, obj); |
|
409 } |
|
410 |
|
411 void InstanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) { |
|
412 if (UseCompressedOops) { |
|
413 specialized_oop_push_contents<narrowOop>(this, pm, obj); |
|
414 } else { |
|
415 specialized_oop_push_contents<oop>(this, pm, obj); |
|
416 } |
|
417 } |
|
418 |
|
419 template <class T> |
|
420 void specialized_oop_update_pointers(InstanceRefKlass *ref, |
|
421 ParCompactionManager* cm, oop obj) { |
|
422 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj); |
|
423 PSParallelCompact::adjust_pointer(referent_addr); |
|
424 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj); |
|
425 PSParallelCompact::adjust_pointer(next_addr); |
|
426 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj); |
|
427 PSParallelCompact::adjust_pointer(discovered_addr); |
|
428 debug_only(trace_reference_gc("InstanceRefKlass::oop_update_ptrs", obj, |
|
429 referent_addr, next_addr, discovered_addr);) |
|
430 } |
|
431 |
|
432 int InstanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) { |
|
433 InstanceKlass::oop_update_pointers(cm, obj); |
|
434 if (UseCompressedOops) { |
|
435 specialized_oop_update_pointers<narrowOop>(this, cm, obj); |
|
436 } else { |
|
437 specialized_oop_update_pointers<oop>(this, cm, obj); |
|
438 } |
|
439 return size_helper(); |
|
440 } |
|
441 #endif // INCLUDE_ALL_GCS |
|
442 |
37 |
443 void InstanceRefKlass::update_nonstatic_oop_maps(Klass* k) { |
38 void InstanceRefKlass::update_nonstatic_oop_maps(Klass* k) { |
444 // Clear the nonstatic oop-map entries corresponding to referent |
39 // Clear the nonstatic oop-map entries corresponding to referent |
445 // and nextPending field. They are treated specially by the |
40 // and nextPending field. They are treated specially by the |
446 // garbage collector. |
41 // garbage collector. |