author       johnc
date         Thu, 30 Apr 2009 15:07:53 -0700
changeset    2741:34e2a243d69a
parent       2344:f2e09ba7ceab
child        3261:c7d5aae8d3f7
permissions  -rw-r--r--
/*
 * Copyright 2001-2008 Sun Microsystems, Inc. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

#include "incls/_precompiled.incl"
#include "incls/_g1MarkSweep.cpp.incl"

class HeapRegion;

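// Entry point for a full, stop-the-world collection of the G1 heap: mark all
// live objects (phase 1), compute their new locations (phase 2), adjust
// pointers to refer to those locations (phase 3), and compact (phase 4).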
void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                      bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  // hook up weak ref data so it can be used during Mark-Sweep
  assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  GenMarkSweep::_ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  // When collecting the permanent generation methodOops may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();
  Threads::gc_prologue();

  // Increment the invocation count for the permanent generation, since it is
  // implicitly collected whenever we do a full mark sweep collection.
  SharedHeap* sh = SharedHeap::heap();
  sh->perm_gen()->stat_record()->invocations++;

  bool marked_for_unloading = false;

  allocate_stacks();

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
  BiasedLocking::preserve_marks();

  mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);

  if (VerifyDuringGC) {
    G1CollectedHeap* g1h = G1CollectedHeap::heap();
    g1h->checkConcurrentMark();
  }

  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  mark_sweep_phase3();

  mark_sweep_phase4();

  GenMarkSweep::restore_marks();
  BiasedLocking::restore_marks();
  GenMarkSweep::deallocate_stacks();

  // We must invalidate the perm-gen rs, so that it gets rebuilt.
  GenRemSet* rs = sh->rem_set();
  rs->invalidate(sh->perm_gen()->used_region(), true /*whole_heap*/);

  // "free at last gc" is calculated from these.
  // CHF: cheating for now!!!
  // Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
  // Universe::set_heap_used_at_last_gc(Universe::heap()->used());

  Threads::gc_epilogue();
  CodeCache::gc_epilogue();

  // refs processing: clean slate
  GenMarkSweep::_ref_processor = NULL;
}

void G1MarkSweep::allocate_stacks() {
  GenMarkSweep::_preserved_count_max = 0;
  GenMarkSweep::_preserved_marks = NULL;
  GenMarkSweep::_preserved_count = 0;
  GenMarkSweep::_preserved_mark_stack = NULL;
  GenMarkSweep::_preserved_oop_stack = NULL;

  GenMarkSweep::_marking_stack =
    new (ResourceObj::C_HEAP) GrowableArray<oop>(4000, true);

  size_t size = SystemDictionary::number_of_classes() * 2;
  GenMarkSweep::_revisit_klass_stack =
    new (ResourceObj::C_HEAP) GrowableArray<Klass*>((int)size, true);
}

void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  EventMark m("1 mark object");
  TraceTime tm("phase 1", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace(" 1");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // Collecting permanent generation.
                           SharedHeap::SO_SystemClasses,
                           &GenMarkSweep::follow_root_closure,
                           &GenMarkSweep::follow_root_closure);

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  rp->setup_policy(clear_all_softrefs);
  rp->process_discovered_references(&GenMarkSweep::is_alive,
                                    &GenMarkSweep::keep_alive,
                                    &GenMarkSweep::follow_stack_closure,
                                    NULL);

  // Follow system dictionary roots and unload classes
  bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
  assert(GenMarkSweep::_marking_stack->is_empty(),
         "stack should be empty by now");

  // Follow code cache roots (has to be done after system dictionary,
  // assumes all live klasses are marked)
  CodeCache::do_unloading(&GenMarkSweep::is_alive,
                          &GenMarkSweep::keep_alive,
                          purged_class);
  GenMarkSweep::follow_stack();

  // Update subklass/sibling/implementor links of live klasses
  GenMarkSweep::follow_weak_klass_links();
  assert(GenMarkSweep::_marking_stack->is_empty(),
         "stack should be empty by now");

  // Visit symbol and interned string tables and delete unmarked oops
  SymbolTable::unlink(&GenMarkSweep::is_alive);
  StringTable::unlink(&GenMarkSweep::is_alive);

  assert(GenMarkSweep::_marking_stack->is_empty(),
         "stack should be empty by now");
}

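// Visited for every region in phase 2: a marked (live) humongous object is
// forwarded to itself so that it stays in place, a dead humongous region is
// freed, and every non-humongous region computes forwarding addresses for its
// live objects and clears the card table above its new compaction top.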
class G1PrepareCompactClosure: public HeapRegionClosure {
  ModRefBarrierSet* _mrbs;
  CompactPoint _cp;

  void free_humongous_region(HeapRegion* hr) {
    HeapWord* bot = hr->bottom();
    HeapWord* end = hr->end();
    assert(hr->startsHumongous(),
           "Only the start of a humongous region should be freed.");
    G1CollectedHeap::heap()->free_region(hr);
    hr->prepare_for_compaction(&_cp);
    // Also clear the part of the card table that will be unused after
    // compaction.
    _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
  }

public:
  G1PrepareCompactClosure(CompactibleSpace* cs) :
    _cp(NULL, cs, cs->initialize_threshold()),
    _mrbs(G1CollectedHeap::heap()->mr_bs())
  {}
  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          obj->forward_to(obj);
        } else {
          free_humongous_region(hr);
        }
      } else {
        assert(hr->continuesHumongous(), "Invalid humongous.");
      }
    } else {
      hr->prepare_for_compaction(&_cp);
      // Also clear the part of the card table that will be unused after
      // compaction.
      _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
    }
    return false;
  }
};

// Finds the first HeapRegion.
class FindFirstRegionClosure: public HeapRegionClosure {
  HeapRegion* _a_region;
public:
  FindFirstRegionClosure() : _a_region(NULL) {}
  bool doHeapRegion(HeapRegion* r) {
    _a_region = r;
    return true;
  }
  HeapRegion* result() { return _a_region; }
};

void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is imperative that we traverse perm_gen LAST. If dead space is
  // allowed, a range of dead objects may get overwritten by a dead int
  // array. If perm_gen is not traversed last a klassOop may get
  // overwritten. This is fine since it is dead, but if the class has dead
  // instances we have to skip them, and in order to find their size we
  // need the klassOop!
  //
  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  EventMark m("2 compute new addresses");
  TraceTime tm("phase 2", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("2");

  FindFirstRegionClosure cl;
  g1h->heap_region_iterate(&cl);
  HeapRegion *r = cl.result();
  CompactibleSpace* sp = r;
  if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
    sp = r->next_compaction_space();
  }

  G1PrepareCompactClosure blk(sp);
  g1h->heap_region_iterate(&blk);

  CompactPoint perm_cp(pg, NULL, NULL);
  pg->prepare_for_compaction(&perm_cp);
}

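// Applied to every region in phase 3: adjusts the oops within each region
// (or within the single humongous object starting in a region) to point at
// the forwarding addresses computed in phase 2.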
class G1AdjustPointersClosure: public HeapRegionClosure {
public:
  bool doHeapRegion(HeapRegion* r) {
    if (r->isHumongous()) {
      if (r->startsHumongous()) {
        // We must adjust the pointers on the single H object.
        oop obj = oop(r->bottom());
        debug_only(GenMarkSweep::track_interior_pointers(obj));
        // point all the oops to the new location
        obj->adjust_pointers();
        debug_only(GenMarkSweep::check_interior_pointers());
      }
    } else {
      // This really ought to be "as_CompactibleSpace"...
      r->adjust_pointers();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  // Adjust the pointers to reflect the new locations
  EventMark m("3 adjust pointers");
  TraceTime tm("phase 3", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("3");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // Collecting permanent generation.
                           SharedHeap::SO_AllClasses,
                           &GenMarkSweep::adjust_root_pointer_closure,
                           &GenMarkSweep::adjust_pointer_closure);

  g1h->ref_processor()->weak_oops_do(&GenMarkSweep::adjust_root_pointer_closure);

  // Now adjust pointers in remaining weak roots. (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  g1h->g1_process_weak_roots(&GenMarkSweep::adjust_root_pointer_closure,
                             &GenMarkSweep::adjust_pointer_closure);

  GenMarkSweep::adjust_marks();

  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
  pg->adjust_pointers();
}

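// Applied to every region in phase 4: ordinary regions are compacted by
// sliding their live objects to the forwarding addresses computed earlier;
// a live humongous object stays in place and has its mark word re-initialized.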
class G1SpaceCompactClosure: public HeapRegionClosure {
public:
  G1SpaceCompactClosure() {}

  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          obj->init_mark();
        } else {
          assert(hr->is_empty(), "Should have been cleared in phase 2.");
        }
        hr->reset_during_compaction();
      }
    } else {
      hr->compact();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // It is imperative that we traverse perm_gen first in phase4. All
  // classes must be allocated earlier than their instances, and traversing
  // perm_gen first makes sure that all klassOops have moved to their new
  // location before any instance does a dispatch through its klass!

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (perm_gen first rather than last), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  EventMark m("4 compact heap");
  TraceTime tm("phase 4", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("4");

  pg->compact();

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);

}

// Local Variables: ***
// c-indentation-style: gnu ***
// End: ***