author | thartmann |
Mon, 06 Oct 2014 07:58:50 +0200 | |
changeset 27015 | 7a7b66fbc782 |
parent 26942 | fa5ea7ff078d |
child 27410 | dd80df7cfa2b |
permissions | -rw-r--r-- |
1 | 1 |
/* |
24424
2658d7834c6e
8037816: Fix for 8036122 breaks build with Xcode5/clang
drchase
parents:
23214
diff
changeset
|
2 |
* Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved. |
1 | 3 |
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 |
* |
|
5 |
* This code is free software; you can redistribute it and/or modify it |
|
6 |
* under the terms of the GNU General Public License version 2 only, as |
|
7 |
* published by the Free Software Foundation. |
|
8 |
* |
|
9 |
* This code is distributed in the hope that it will be useful, but WITHOUT |
|
10 |
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
|
11 |
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
|
12 |
* version 2 for more details (a copy is included in the LICENSE file that |
|
13 |
* accompanied this code). |
|
14 |
* |
|
15 |
* You should have received a copy of the GNU General Public License version |
|
16 |
* 2 along with this work; if not, write to the Free Software Foundation, |
|
17 |
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. |
|
18 |
* |
|
5547
f4b087cbb361
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
5533
diff
changeset
|
19 |
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
f4b087cbb361
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
5533
diff
changeset
|
20 |
* or visit www.oracle.com if you need additional information or have any |
f4b087cbb361
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
5533
diff
changeset
|
21 |
* questions. |
1 | 22 |
* |
23 |
*/ |
|
24 |
||
7397 | 25 |
#include "precompiled.hpp" |
26 |
#include "code/codeBlob.hpp" |
|
27 |
#include "code/codeCache.hpp" |
|
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
28 |
#include "code/compiledIC.hpp" |
7397 | 29 |
#include "code/dependencies.hpp" |
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
30 |
#include "code/icBuffer.hpp" |
7397 | 31 |
#include "code/nmethod.hpp" |
32 |
#include "code/pcDesc.hpp" |
|
15201
f3d755b11424
8005204: Code Cache Reduction: command line options implementation
vladidan
parents:
13878
diff
changeset
|
33 |
#include "compiler/compileBroker.hpp" |
7397 | 34 |
#include "gc_implementation/shared/markSweep.hpp" |
35 |
#include "memory/allocation.inline.hpp" |
|
36 |
#include "memory/gcLocker.hpp" |
|
37 |
#include "memory/iterator.hpp" |
|
38 |
#include "memory/resourceArea.hpp" |
|
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
39 |
#include "oops/method.hpp" |
7397 | 40 |
#include "oops/objArrayOop.hpp" |
41 |
#include "oops/oop.inline.hpp" |
|
42 |
#include "runtime/handles.inline.hpp" |
|
15201
f3d755b11424
8005204: Code Cache Reduction: command line options implementation
vladidan
parents:
13878
diff
changeset
|
43 |
#include "runtime/arguments.hpp" |
7397 | 44 |
#include "runtime/icache.hpp" |
45 |
#include "runtime/java.hpp" |
|
46 |
#include "runtime/mutexLocker.hpp" |
|
26796 | 47 |
#include "runtime/compilationPolicy.hpp" |
7397 | 48 |
#include "services/memoryService.hpp" |
18025 | 49 |
#include "trace/tracing.hpp" |
7397 | 50 |
#include "utilities/xmlstream.hpp" |
26796 | 51 |
#ifdef COMPILER1 |
52 |
#include "c1/c1_Compilation.hpp" |
|
53 |
#include "c1/c1_Compiler.hpp" |
|
54 |
#endif |
|
55 |
#ifdef COMPILER2 |
|
56 |
#include "opto/c2compiler.hpp" |
|
57 |
#include "opto/compile.hpp" |
|
26805 | 58 |
#include "opto/node.hpp" |
26796 | 59 |
#endif |
1 | 60 |
|
61 |
// Helper class for printing in CodeCache |
|
62 |
class CodeBlob_sizes { |
|
63 |
private: |
|
64 |
int count; |
|
65 |
int total_size; |
|
66 |
int header_size; |
|
67 |
int code_size; |
|
68 |
int stub_size; |
|
69 |
int relocation_size; |
|
70 |
int scopes_oop_size; |
|
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
71 |
int scopes_metadata_size; |
1 | 72 |
int scopes_data_size; |
73 |
int scopes_pcs_size; |
|
74 |
||
75 |
public: |
|
76 |
CodeBlob_sizes() { |
|
77 |
count = 0; |
|
78 |
total_size = 0; |
|
79 |
header_size = 0; |
|
80 |
code_size = 0; |
|
81 |
stub_size = 0; |
|
82 |
relocation_size = 0; |
|
83 |
scopes_oop_size = 0; |
|
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
84 |
scopes_metadata_size = 0; |
1 | 85 |
scopes_data_size = 0; |
86 |
scopes_pcs_size = 0; |
|
87 |
} |
|
88 |
||
89 |
int total() { return total_size; } |
|
90 |
bool is_empty() { return count == 0; } |
|
91 |
||
92 |
void print(const char* title) { |
|
24424
2658d7834c6e
8037816: Fix for 8036122 breaks build with Xcode5/clang
drchase
parents:
23214
diff
changeset
|
93 |
tty->print_cr(" #%d %s = %dK (hdr %d%%, loc %d%%, code %d%%, stub %d%%, [oops %d%%, metadata %d%%, data %d%%, pcs %d%%])", |
1 | 94 |
count, |
95 |
title, |
|
24424
2658d7834c6e
8037816: Fix for 8036122 breaks build with Xcode5/clang
drchase
parents:
23214
diff
changeset
|
96 |
(int)(total() / K), |
1 | 97 |
header_size * 100 / total_size, |
98 |
relocation_size * 100 / total_size, |
|
99 |
code_size * 100 / total_size, |
|
100 |
stub_size * 100 / total_size, |
|
101 |
scopes_oop_size * 100 / total_size, |
|
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
102 |
scopes_metadata_size * 100 / total_size, |
1 | 103 |
scopes_data_size * 100 / total_size, |
104 |
scopes_pcs_size * 100 / total_size); |
|
105 |
} |
|
106 |
||
107 |
void add(CodeBlob* cb) { |
|
108 |
count++; |
|
109 |
total_size += cb->size(); |
|
110 |
header_size += cb->header_size(); |
|
111 |
relocation_size += cb->relocation_size(); |
|
112 |
if (cb->is_nmethod()) { |
|
5686
5435e77aa3df
6951083: oops and relocations should part of nmethod not CodeBlob
twisti
parents:
5533
diff
changeset
|
113 |
nmethod* nm = cb->as_nmethod_or_null(); |
6418 | 114 |
code_size += nm->insts_size(); |
1 | 115 |
stub_size += nm->stub_size(); |
116 |
||
5686
5435e77aa3df
6951083: oops and relocations should part of nmethod not CodeBlob
twisti
parents:
5533
diff
changeset
|
117 |
scopes_oop_size += nm->oops_size(); |
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
118 |
scopes_metadata_size += nm->metadata_size(); |
1 | 119 |
scopes_data_size += nm->scopes_data_size(); |
120 |
scopes_pcs_size += nm->scopes_pcs_size(); |
|
121 |
} else { |
|
6418 | 122 |
code_size += cb->code_size(); |
1 | 123 |
} |
124 |
} |
|
125 |
}; |
|
126 |
||
26796 | 127 |
// Iterate over all CodeHeaps |
128 |
#define FOR_ALL_HEAPS(heap) for (GrowableArrayIterator<CodeHeap*> heap = _heaps->begin(); heap != _heaps->end(); ++heap) |
|
129 |
// Iterate over all CodeBlobs (cb) on the given CodeHeap |
|
130 |
#define FOR_ALL_BLOBS(cb, heap) for (CodeBlob* cb = first_blob(heap); cb != NULL; cb = next_blob(heap, cb)) |
|
1 | 131 |
|
26796 | 132 |
address CodeCache::_low_bound = 0; |
133 |
address CodeCache::_high_bound = 0; |
|
1 | 134 |
int CodeCache::_number_of_blobs = 0; |
5924
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
135 |
int CodeCache::_number_of_adapters = 0; |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
136 |
int CodeCache::_number_of_nmethods = 0; |
1 | 137 |
int CodeCache::_number_of_nmethods_with_dependencies = 0; |
138 |
bool CodeCache::_needs_cache_clean = false; |
|
3908 | 139 |
nmethod* CodeCache::_scavenge_root_nmethods = NULL; |
18025 | 140 |
int CodeCache::_codemem_full_count = 0; |
1 | 141 |
|
26796 | 142 |
// Initialize array of CodeHeaps |
143 |
GrowableArray<CodeHeap*>* CodeCache::_heaps = new(ResourceObj::C_HEAP, mtCode) GrowableArray<CodeHeap*> (CodeBlobType::All, true); |
|
144 |
||
145 |
void CodeCache::initialize_heaps() { |
|
146 |
// Determine size of compiler buffers |
|
147 |
size_t code_buffers_size = 0; |
|
148 |
#ifdef COMPILER1 |
|
149 |
// C1 temporary code buffers (see Compiler::init_buffer_blob()) |
|
150 |
const int c1_count = CompilationPolicy::policy()->compiler_count(CompLevel_simple); |
|
151 |
code_buffers_size += c1_count * Compiler::code_buffer_size(); |
|
152 |
#endif |
|
153 |
#ifdef COMPILER2 |
|
154 |
// C2 scratch buffers (see Compile::init_scratch_buffer_blob()) |
|
155 |
const int c2_count = CompilationPolicy::policy()->compiler_count(CompLevel_full_optimization); |
|
156 |
// Initial size of constant table (this may be increased if a compiled method needs more space) |
|
157 |
code_buffers_size += c2_count * C2Compiler::initial_code_buffer_size(); |
|
158 |
#endif |
|
1 | 159 |
|
26796 | 160 |
// Calculate default CodeHeap sizes if not set by user |
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
161 |
if (!FLAG_IS_CMDLINE(NonNMethodCodeHeapSize) && !FLAG_IS_CMDLINE(ProfiledCodeHeapSize) |
26796 | 162 |
&& !FLAG_IS_CMDLINE(NonProfiledCodeHeapSize)) { |
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
163 |
// Increase default NonNMethodCodeHeapSize to account for compiler buffers |
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
164 |
FLAG_SET_ERGO(uintx, NonNMethodCodeHeapSize, NonNMethodCodeHeapSize + code_buffers_size); |
26796 | 165 |
|
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
166 |
// Check if we have enough space for the non-nmethod code heap |
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
167 |
if (ReservedCodeCacheSize > NonNMethodCodeHeapSize) { |
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
168 |
// Use the default value for NonNMethodCodeHeapSize and one half of the |
26796 | 169 |
// remaining size for non-profiled methods and one half for profiled methods |
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
170 |
size_t remaining_size = ReservedCodeCacheSize - NonNMethodCodeHeapSize; |
26796 | 171 |
size_t profiled_size = remaining_size / 2; |
172 |
size_t non_profiled_size = remaining_size - profiled_size; |
|
173 |
FLAG_SET_ERGO(uintx, ProfiledCodeHeapSize, profiled_size); |
|
174 |
FLAG_SET_ERGO(uintx, NonProfiledCodeHeapSize, non_profiled_size); |
|
175 |
} else { |
|
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
176 |
// Use all space for the non-nmethod heap and set other heaps to minimal size |
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
177 |
FLAG_SET_ERGO(uintx, NonNMethodCodeHeapSize, ReservedCodeCacheSize - os::vm_page_size() * 2); |
26796 | 178 |
FLAG_SET_ERGO(uintx, ProfiledCodeHeapSize, os::vm_page_size()); |
179 |
FLAG_SET_ERGO(uintx, NonProfiledCodeHeapSize, os::vm_page_size()); |
|
180 |
} |
|
181 |
} |
|
1 | 182 |
|
26796 | 183 |
// We do not need the profiled CodeHeap, use all space for the non-profiled CodeHeap |
184 |
if(!heap_available(CodeBlobType::MethodProfiled)) { |
|
185 |
FLAG_SET_ERGO(uintx, NonProfiledCodeHeapSize, NonProfiledCodeHeapSize + ProfiledCodeHeapSize); |
|
186 |
FLAG_SET_ERGO(uintx, ProfiledCodeHeapSize, 0); |
|
187 |
} |
|
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
188 |
// We do not need the non-profiled CodeHeap, use all space for the non-nmethod CodeHeap |
26796 | 189 |
if(!heap_available(CodeBlobType::MethodNonProfiled)) { |
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
190 |
FLAG_SET_ERGO(uintx, NonNMethodCodeHeapSize, NonNMethodCodeHeapSize + NonProfiledCodeHeapSize); |
26796 | 191 |
FLAG_SET_ERGO(uintx, NonProfiledCodeHeapSize, 0); |
192 |
} |
|
193 |
||
194 |
// Make sure we have enough space for VM internal code |
|
195 |
uint min_code_cache_size = (CodeCacheMinimumUseSpace DEBUG_ONLY(* 3)) + CodeCacheMinimumFreeSpace; |
|
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
196 |
if (NonNMethodCodeHeapSize < (min_code_cache_size + code_buffers_size)) { |
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
197 |
vm_exit_during_initialization("Not enough space in non-nmethod code heap to run VM."); |
26796 | 198 |
} |
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
199 |
guarantee(NonProfiledCodeHeapSize + ProfiledCodeHeapSize + NonNMethodCodeHeapSize <= ReservedCodeCacheSize, "Size check"); |
26796 | 200 |
|
201 |
// Align reserved sizes of CodeHeaps |
|
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
202 |
size_t non_method_size = ReservedCodeSpace::allocation_align_size_up(NonNMethodCodeHeapSize); |
26796 | 203 |
size_t profiled_size = ReservedCodeSpace::allocation_align_size_up(ProfiledCodeHeapSize); |
204 |
size_t non_profiled_size = ReservedCodeSpace::allocation_align_size_up(NonProfiledCodeHeapSize); |
|
1 | 205 |
|
26796 | 206 |
// Compute initial sizes of CodeHeaps |
207 |
size_t init_non_method_size = MIN2(InitialCodeCacheSize, non_method_size); |
|
208 |
size_t init_profiled_size = MIN2(InitialCodeCacheSize, profiled_size); |
|
209 |
size_t init_non_profiled_size = MIN2(InitialCodeCacheSize, non_profiled_size); |
|
1 | 210 |
|
26796 | 211 |
// Reserve one continuous chunk of memory for CodeHeaps and split it into |
212 |
// parts for the individual heaps. The memory layout looks like this: |
|
213 |
// ---------- high ----------- |
|
214 |
// Non-profiled nmethods |
|
215 |
// Profiled nmethods |
|
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
216 |
// Non-nmethods |
26796 | 217 |
// ---------- low ------------ |
218 |
ReservedCodeSpace rs = reserve_heap_memory(non_profiled_size + profiled_size + non_method_size); |
|
219 |
ReservedSpace non_method_space = rs.first_part(non_method_size); |
|
220 |
ReservedSpace rest = rs.last_part(non_method_size); |
|
221 |
ReservedSpace profiled_space = rest.first_part(profiled_size); |
|
222 |
ReservedSpace non_profiled_space = rest.last_part(profiled_size); |
|
223 |
||
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
224 |
// Non-nmethods (stubs, adapters, ...) |
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
225 |
add_heap(non_method_space, "CodeHeap 'non-nmethods'", init_non_method_size, CodeBlobType::NonNMethod); |
26796 | 226 |
// Tier 2 and tier 3 (profiled) methods |
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
227 |
add_heap(profiled_space, "CodeHeap 'profiled nmethods'", init_profiled_size, CodeBlobType::MethodProfiled); |
26796 | 228 |
// Tier 1 and tier 4 (non-profiled) methods and native methods |
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
229 |
add_heap(non_profiled_space, "CodeHeap 'non-profiled nmethods'", init_non_profiled_size, CodeBlobType::MethodNonProfiled); |
1 | 230 |
} |
231 |
||
26796 | 232 |
ReservedCodeSpace CodeCache::reserve_heap_memory(size_t size) { |
233 |
// Determine alignment |
|
234 |
const size_t page_size = os::can_execute_large_page_memory() ? |
|
26809 | 235 |
MIN2(os::page_size_for_region(InitialCodeCacheSize, 8), |
236 |
os::page_size_for_region(size, 8)) : |
|
26796 | 237 |
os::vm_page_size(); |
238 |
const size_t granularity = os::vm_allocation_granularity(); |
|
239 |
const size_t r_align = MAX2(page_size, granularity); |
|
240 |
const size_t r_size = align_size_up(size, r_align); |
|
241 |
const size_t rs_align = page_size == (size_t) os::vm_page_size() ? 0 : |
|
242 |
MAX2(page_size, granularity); |
|
1 | 243 |
|
26796 | 244 |
ReservedCodeSpace rs(r_size, rs_align, rs_align > 0); |
245 |
||
246 |
// Initialize bounds |
|
247 |
_low_bound = (address)rs.base(); |
|
248 |
_high_bound = _low_bound + rs.size(); |
|
249 |
||
250 |
return rs; |
|
251 |
} |
|
252 |
||
253 |
bool CodeCache::heap_available(int code_blob_type) { |
|
254 |
if (!SegmentedCodeCache) { |
|
255 |
// No segmentation: use a single code heap |
|
256 |
return (code_blob_type == CodeBlobType::All); |
|
27015
7a7b66fbc782
8059559: SIGSEGV at CodeHeap::allocate(unsigned int, bool)
thartmann
parents:
26942
diff
changeset
|
257 |
} else if (Arguments::mode() == Arguments::_int) { |
26796 | 258 |
// Interpreter only: we don't need any method code heaps |
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
259 |
return (code_blob_type == CodeBlobType::NonNMethod); |
26796 | 260 |
} else if (TieredCompilation && (TieredStopAtLevel > CompLevel_simple)) { |
261 |
// Tiered compilation: use all code heaps |
|
262 |
return (code_blob_type < CodeBlobType::All); |
|
263 |
} else { |
|
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
264 |
// No TieredCompilation: we only need the non-nmethod and non-profiled code heap |
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
265 |
return (code_blob_type == CodeBlobType::NonNMethod) || |
26796 | 266 |
(code_blob_type == CodeBlobType::MethodNonProfiled); |
267 |
} |
|
1 | 268 |
} |
269 |
||
26796 | 270 |
void CodeCache::add_heap(ReservedSpace rs, const char* name, size_t size_initial, int code_blob_type) { |
271 |
// Check if heap is needed |
|
272 |
if (!heap_available(code_blob_type)) { |
|
273 |
return; |
|
5533
e8d9ff82ec62
6950075: nmethod sweeper should operate concurrently
never
parents:
5247
diff
changeset
|
274 |
} |
26796 | 275 |
|
276 |
// Create CodeHeap |
|
277 |
CodeHeap* heap = new CodeHeap(name, code_blob_type); |
|
278 |
_heaps->append(heap); |
|
279 |
||
280 |
// Reserve Space |
|
281 |
size_initial = round_to(size_initial, os::vm_page_size()); |
|
282 |
||
283 |
if (!heap->reserve(rs, size_initial, CodeCacheSegmentSize)) { |
|
284 |
vm_exit_during_initialization("Could not reserve enough space for code cache"); |
|
285 |
} |
|
286 |
||
287 |
// Register the CodeHeap |
|
288 |
MemoryService::add_code_heap_memory_pool(heap, name); |
|
289 |
} |
|
290 |
||
291 |
CodeHeap* CodeCache::get_code_heap(CodeBlob* cb) { |
|
292 |
assert(cb != NULL, "CodeBlob is null"); |
|
293 |
FOR_ALL_HEAPS(heap) { |
|
294 |
if ((*heap)->contains(cb)) { |
|
295 |
return *heap; |
|
296 |
} |
|
297 |
} |
|
298 |
ShouldNotReachHere(); |
|
299 |
return NULL; |
|
5533
e8d9ff82ec62
6950075: nmethod sweeper should operate concurrently
never
parents:
5247
diff
changeset
|
300 |
} |
e8d9ff82ec62
6950075: nmethod sweeper should operate concurrently
never
parents:
5247
diff
changeset
|
301 |
|
26796 | 302 |
CodeHeap* CodeCache::get_code_heap(int code_blob_type) { |
303 |
FOR_ALL_HEAPS(heap) { |
|
304 |
if ((*heap)->accepts(code_blob_type)) { |
|
305 |
return *heap; |
|
306 |
} |
|
307 |
} |
|
308 |
return NULL; |
|
309 |
} |
|
310 |
||
311 |
CodeBlob* CodeCache::first_blob(CodeHeap* heap) { |
|
5533
e8d9ff82ec62
6950075: nmethod sweeper should operate concurrently
never
parents:
5247
diff
changeset
|
312 |
assert_locked_or_safepoint(CodeCache_lock); |
26796 | 313 |
assert(heap != NULL, "heap is null"); |
314 |
return (CodeBlob*)heap->first(); |
|
5533
e8d9ff82ec62
6950075: nmethod sweeper should operate concurrently
never
parents:
5247
diff
changeset
|
315 |
} |
1 | 316 |
|
26796 | 317 |
CodeBlob* CodeCache::first_blob(int code_blob_type) { |
318 |
if (heap_available(code_blob_type)) { |
|
319 |
return first_blob(get_code_heap(code_blob_type)); |
|
320 |
} else { |
|
321 |
return NULL; |
|
322 |
} |
|
323 |
} |
|
15201
f3d755b11424
8005204: Code Cache Reduction: command line options implementation
vladidan
parents:
13878
diff
changeset
|
324 |
|
26796 | 325 |
CodeBlob* CodeCache::next_blob(CodeHeap* heap, CodeBlob* cb) { |
326 |
assert_locked_or_safepoint(CodeCache_lock); |
|
327 |
assert(heap != NULL, "heap is null"); |
|
328 |
return (CodeBlob*)heap->next(cb); |
|
329 |
} |
|
330 |
||
331 |
CodeBlob* CodeCache::next_blob(CodeBlob* cb) { |
|
332 |
return next_blob(get_code_heap(cb), cb); |
|
333 |
} |
|
334 |
||
335 |
CodeBlob* CodeCache::allocate(int size, int code_blob_type, bool is_critical) { |
|
1 | 336 |
// Do not seize the CodeCache lock here--if the caller has not |
337 |
// already done so, we are going to lose bigtime, since the code |
|
338 |
// cache will contain a garbage CodeBlob until the caller can |
|
339 |
// run the constructor for the CodeBlob subclass he is busy |
|
340 |
// instantiating. |
|
341 |
assert_locked_or_safepoint(CodeCache_lock); |
|
24439
252c634b2e1c
8029343: CodeCache::allocate increments '_number_of_blobs' even if allocation fails.
anoll
parents:
23214
diff
changeset
|
342 |
assert(size > 0, "allocation request must be reasonable"); |
252c634b2e1c
8029343: CodeCache::allocate increments '_number_of_blobs' even if allocation fails.
anoll
parents:
23214
diff
changeset
|
343 |
if (size <= 0) { |
252c634b2e1c
8029343: CodeCache::allocate increments '_number_of_blobs' even if allocation fails.
anoll
parents:
23214
diff
changeset
|
344 |
return NULL; |
252c634b2e1c
8029343: CodeCache::allocate increments '_number_of_blobs' even if allocation fails.
anoll
parents:
23214
diff
changeset
|
345 |
} |
1 | 346 |
CodeBlob* cb = NULL; |
26796 | 347 |
|
348 |
// Get CodeHeap for the given CodeBlobType |
|
26942
fa5ea7ff078d
8059390: code cache fills up for bigapps/Weblogic+medrec/nowarnings
zmajo
parents:
26919
diff
changeset
|
349 |
CodeHeap* heap = get_code_heap(code_blob_type); |
fa5ea7ff078d
8059390: code cache fills up for bigapps/Weblogic+medrec/nowarnings
zmajo
parents:
26919
diff
changeset
|
350 |
assert(heap != NULL, "heap is null"); |
26796 | 351 |
|
1 | 352 |
while (true) { |
26796 | 353 |
cb = (CodeBlob*)heap->allocate(size, is_critical); |
1 | 354 |
if (cb != NULL) break; |
26796 | 355 |
if (!heap->expand_by(CodeCacheExpansionSize)) { |
1 | 356 |
// Expansion failed |
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
357 |
if (SegmentedCodeCache && (code_blob_type == CodeBlobType::NonNMethod)) { |
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
358 |
// Fallback solution: Store non-nmethod code in the non-profiled code heap |
26796 | 359 |
return allocate(size, CodeBlobType::MethodNonProfiled, is_critical); |
360 |
} |
|
1 | 361 |
return NULL; |
362 |
} |
|
363 |
if (PrintCodeCacheExtension) { |
|
364 |
ResourceMark rm; |
|
26796 | 365 |
if (SegmentedCodeCache) { |
26918
4e1761669426
8059137: MemoryPoolMXBeans for different code heaps should contain 'Code heap' in their names
thartmann
parents:
26809
diff
changeset
|
366 |
tty->print("%s", heap->name()); |
26796 | 367 |
} else { |
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
368 |
tty->print("CodeCache"); |
26796 | 369 |
} |
370 |
tty->print_cr(" extended to [" INTPTR_FORMAT ", " INTPTR_FORMAT "] (" SSIZE_FORMAT " bytes)", |
|
371 |
(intptr_t)heap->low_boundary(), (intptr_t)heap->high(), |
|
372 |
(address)heap->high() - (address)heap->low_boundary()); |
|
1 | 373 |
} |
374 |
} |
|
3908 | 375 |
print_trace("allocation", cb, size); |
24439
252c634b2e1c
8029343: CodeCache::allocate increments '_number_of_blobs' even if allocation fails.
anoll
parents:
23214
diff
changeset
|
376 |
_number_of_blobs++; |
1 | 377 |
return cb; |
378 |
} |
|
379 |
||
380 |
void CodeCache::free(CodeBlob* cb) { |
|
381 |
assert_locked_or_safepoint(CodeCache_lock); |
|
382 |
||
3908 | 383 |
print_trace("free", cb); |
5924
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
384 |
if (cb->is_nmethod()) { |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
385 |
_number_of_nmethods--; |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
386 |
if (((nmethod *)cb)->has_dependencies()) { |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
387 |
_number_of_nmethods_with_dependencies--; |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
388 |
} |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
389 |
} |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
390 |
if (cb->is_adapter_blob()) { |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
391 |
_number_of_adapters--; |
1 | 392 |
} |
393 |
_number_of_blobs--; |
|
394 |
||
26796 | 395 |
// Get heap for given CodeBlob and deallocate |
396 |
get_code_heap(cb)->deallocate(cb); |
|
1 | 397 |
|
398 |
assert(_number_of_blobs >= 0, "sanity check"); |
|
399 |
} |
|
400 |
||
401 |
void CodeCache::commit(CodeBlob* cb) { |
|
402 |
// this is called by nmethod::nmethod, which must already own CodeCache_lock |
|
403 |
assert_locked_or_safepoint(CodeCache_lock); |
|
5924
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
404 |
if (cb->is_nmethod()) { |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
405 |
_number_of_nmethods++; |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
406 |
if (((nmethod *)cb)->has_dependencies()) { |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
407 |
_number_of_nmethods_with_dependencies++; |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
408 |
} |
1 | 409 |
} |
5924
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
410 |
if (cb->is_adapter_blob()) { |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
411 |
_number_of_adapters++; |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
412 |
} |
dc9d04930c82
6965184: possible races in make_not_entrant_or_zombie
never
parents:
5702
diff
changeset
|
413 |
|
1 | 414 |
// flush the hardware I-cache |
6418 | 415 |
ICache::invalidate_range(cb->content_begin(), cb->content_size()); |
1 | 416 |
} |
417 |
||
418 |
bool CodeCache::contains(void *p) { |
|
419 |
// It should be ok to call contains without holding a lock |
|
26796 | 420 |
FOR_ALL_HEAPS(heap) { |
421 |
if ((*heap)->contains(p)) { |
|
422 |
return true; |
|
423 |
} |
|
424 |
} |
|
425 |
return false; |
|
1 | 426 |
} |
427 |
||
26796 | 428 |
// This method is safe to call without holding the CodeCache_lock, as long as a dead CodeBlob is not |
429 |
// looked up (i.e., one that has been marked for deletion). It only depends on the _segmap to contain |
|
1 | 430 |
// valid indices, which it will always do, as long as the CodeBlob is not in the process of being recycled. |
431 |
CodeBlob* CodeCache::find_blob(void* start) { |
|
432 |
CodeBlob* result = find_blob_unsafe(start); |
|
23214
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
433 |
// We could potentially look up non_entrant methods |
26796 | 434 |
guarantee(result == NULL || !result->is_zombie() || result->is_locked_by_vm() || is_error_reported(), "unsafe access to zombie method"); |
1 | 435 |
return result; |
436 |
} |
|
437 |
||
26796 | 438 |
// Lookup that does not fail if you lookup a zombie method (if you call this, be sure to know |
439 |
// what you are doing) |
|
440 |
CodeBlob* CodeCache::find_blob_unsafe(void* start) { |
|
441 |
// NMT can walk the stack before code cache is created |
|
442 |
if (_heaps == NULL || _heaps->is_empty()) return NULL; |
|
443 |
||
444 |
FOR_ALL_HEAPS(heap) { |
|
445 |
CodeBlob* result = (CodeBlob*) (*heap)->find_start(start); |
|
446 |
if (result != NULL && result->blob_contains((address)start)) { |
|
447 |
return result; |
|
448 |
} |
|
449 |
} |
|
450 |
return NULL; |
|
451 |
} |
|
452 |
||
1 | 453 |
nmethod* CodeCache::find_nmethod(void* start) { |
26796 | 454 |
CodeBlob* cb = find_blob(start); |
455 |
assert(cb->is_nmethod(), "did not find an nmethod"); |
|
1 | 456 |
return (nmethod*)cb; |
457 |
} |
|
458 |
||
459 |
void CodeCache::blobs_do(void f(CodeBlob* nm)) { |
|
460 |
assert_locked_or_safepoint(CodeCache_lock); |
|
26796 | 461 |
FOR_ALL_HEAPS(heap) { |
462 |
FOR_ALL_BLOBS(cb, *heap) { |
|
463 |
f(cb); |
|
464 |
} |
|
1 | 465 |
} |
466 |
} |
|
467 |
||
468 |
void CodeCache::nmethods_do(void f(nmethod* nm)) { |
|
469 |
assert_locked_or_safepoint(CodeCache_lock); |
|
26796 | 470 |
NMethodIterator iter; |
471 |
while(iter.next()) { |
|
472 |
f(iter.method()); |
|
1 | 473 |
} |
474 |
} |
|
475 |
||
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
476 |
void CodeCache::alive_nmethods_do(void f(nmethod* nm)) { |
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
477 |
assert_locked_or_safepoint(CodeCache_lock); |
26796 | 478 |
NMethodIterator iter; |
479 |
while(iter.next_alive()) { |
|
480 |
f(iter.method()); |
|
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
481 |
} |
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
482 |
} |
1 | 483 |
|
484 |
int CodeCache::alignment_unit() { |
|
26796 | 485 |
return (int)_heaps->first()->alignment_unit(); |
1 | 486 |
} |
487 |
||
488 |
int CodeCache::alignment_offset() { |
|
26796 | 489 |
return (int)_heaps->first()->alignment_offset(); |
1 | 490 |
} |
491 |
||
26796 | 492 |
// Mark nmethods for unloading if they contain otherwise unreachable oops. |
13878
6e6a462a6cff
7200470: KeepAliveClosure not needed in CodeCache::do_unloading
brutisso
parents:
13728
diff
changeset
|
493 |
void CodeCache::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) { |
1 | 494 |
assert_locked_or_safepoint(CodeCache_lock); |
26796 | 495 |
NMethodIterator iter; |
496 |
while(iter.next_alive()) { |
|
497 |
iter.method()->do_unloading(is_alive, unloading_occurred); |
|
1 | 498 |
} |
499 |
} |
|
500 |
||
3908 | 501 |
void CodeCache::blobs_do(CodeBlobClosure* f) { |
1 | 502 |
assert_locked_or_safepoint(CodeCache_lock); |
26796 | 503 |
FOR_ALL_HEAPS(heap) { |
504 |
FOR_ALL_BLOBS(cb, *heap) { |
|
505 |
if (cb->is_alive()) { |
|
506 |
f->do_code_blob(cb); |
|
3908 | 507 |
|
508 |
#ifdef ASSERT |
|
26796 | 509 |
if (cb->is_nmethod()) |
510 |
((nmethod*)cb)->verify_scavenge_root_oops(); |
|
3908 | 511 |
#endif //ASSERT |
26796 | 512 |
} |
513 |
} |
|
1 | 514 |
} |
515 |
} |
|
516 |
||
3908 | 517 |
// Walk the list of methods which might contain non-perm oops. |
518 |
void CodeCache::scavenge_root_nmethods_do(CodeBlobClosure* f) { |
|
519 |
assert_locked_or_safepoint(CodeCache_lock); |
|
25492
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
520 |
|
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
521 |
if (UseG1GC) { |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
522 |
return; |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
523 |
} |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
524 |
|
3908 | 525 |
debug_only(mark_scavenge_root_nmethods()); |
526 |
||
527 |
for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) { |
|
528 |
debug_only(cur->clear_scavenge_root_marked()); |
|
529 |
assert(cur->scavenge_root_not_marked(), ""); |
|
530 |
assert(cur->on_scavenge_root_list(), "else shouldn't be on this list"); |
|
531 |
||
532 |
bool is_live = (!cur->is_zombie() && !cur->is_unloaded()); |
|
533 |
#ifndef PRODUCT |
|
534 |
if (TraceScavenge) { |
|
535 |
cur->print_on(tty, is_live ? "scavenge root" : "dead scavenge root"); tty->cr(); |
|
536 |
} |
|
537 |
#endif //PRODUCT |
|
5247
c2b4e525b3e5
6940520: CodeCache::scavenge_root_nmethods_do must fix oop relocations
twisti
parents:
4750
diff
changeset
|
538 |
if (is_live) { |
3908 | 539 |
// Perform cur->oops_do(f), maybe just once per nmethod. |
540 |
f->do_code_blob(cur); |
|
5247
c2b4e525b3e5
6940520: CodeCache::scavenge_root_nmethods_do must fix oop relocations
twisti
parents:
4750
diff
changeset
|
541 |
} |
3908 | 542 |
} |
543 |
||
544 |
// Check for stray marks. |
|
545 |
debug_only(verify_perm_nmethods(NULL)); |
|
546 |
} |
|
547 |
||
548 |
void CodeCache::add_scavenge_root_nmethod(nmethod* nm) { |
|
549 |
assert_locked_or_safepoint(CodeCache_lock); |
|
25492
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
550 |
|
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
551 |
if (UseG1GC) { |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
552 |
return; |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
553 |
} |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
554 |
|
3908 | 555 |
nm->set_on_scavenge_root_list(); |
556 |
nm->set_scavenge_root_link(_scavenge_root_nmethods); |
|
557 |
set_scavenge_root_nmethods(nm); |
|
558 |
print_trace("add_scavenge_root", nm); |
|
559 |
} |
|
560 |
||
561 |
void CodeCache::drop_scavenge_root_nmethod(nmethod* nm) { |
|
562 |
assert_locked_or_safepoint(CodeCache_lock); |
|
25492
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
563 |
|
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
564 |
if (UseG1GC) { |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
565 |
return; |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
566 |
} |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
567 |
|
3908 | 568 |
print_trace("drop_scavenge_root", nm); |
569 |
nmethod* last = NULL; |
|
570 |
nmethod* cur = scavenge_root_nmethods(); |
|
571 |
while (cur != NULL) { |
|
572 |
nmethod* next = cur->scavenge_root_link(); |
|
573 |
if (cur == nm) { |
|
574 |
if (last != NULL) |
|
575 |
last->set_scavenge_root_link(next); |
|
576 |
else set_scavenge_root_nmethods(next); |
|
577 |
nm->set_scavenge_root_link(NULL); |
|
578 |
nm->clear_on_scavenge_root_list(); |
|
579 |
return; |
|
580 |
} |
|
581 |
last = cur; |
|
582 |
cur = next; |
|
583 |
} |
|
584 |
assert(false, "should have been on list"); |
|
585 |
} |
|
586 |
||
587 |
void CodeCache::prune_scavenge_root_nmethods() { |
|
588 |
assert_locked_or_safepoint(CodeCache_lock); |
|
25492
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
589 |
|
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
590 |
if (UseG1GC) { |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
591 |
return; |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
592 |
} |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
593 |
|
3908 | 594 |
debug_only(mark_scavenge_root_nmethods()); |
595 |
||
596 |
nmethod* last = NULL; |
|
597 |
nmethod* cur = scavenge_root_nmethods(); |
|
598 |
while (cur != NULL) { |
|
599 |
nmethod* next = cur->scavenge_root_link(); |
|
600 |
debug_only(cur->clear_scavenge_root_marked()); |
|
601 |
assert(cur->scavenge_root_not_marked(), ""); |
|
602 |
assert(cur->on_scavenge_root_list(), "else shouldn't be on this list"); |
|
603 |
||
604 |
if (!cur->is_zombie() && !cur->is_unloaded() |
|
605 |
&& cur->detect_scavenge_root_oops()) { |
|
606 |
// Keep it. Advance 'last' to prevent deletion. |
|
607 |
last = cur; |
|
608 |
} else { |
|
609 |
// Prune it from the list, so we don't have to look at it any more. |
|
610 |
print_trace("prune_scavenge_root", cur); |
|
611 |
cur->set_scavenge_root_link(NULL); |
|
612 |
cur->clear_on_scavenge_root_list(); |
|
613 |
if (last != NULL) |
|
614 |
last->set_scavenge_root_link(next); |
|
615 |
else set_scavenge_root_nmethods(next); |
|
616 |
} |
|
617 |
cur = next; |
|
618 |
} |
|
619 |
||
620 |
// Check for stray marks. |
|
621 |
debug_only(verify_perm_nmethods(NULL)); |
|
622 |
} |
|
623 |
||
624 |
#ifndef PRODUCT |
|
625 |
void CodeCache::asserted_non_scavengable_nmethods_do(CodeBlobClosure* f) { |
|
25492
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
626 |
if (UseG1GC) { |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
627 |
return; |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
628 |
} |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
629 |
|
3908 | 630 |
// While we are here, verify the integrity of the list. |
631 |
mark_scavenge_root_nmethods(); |
|
632 |
for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) { |
|
633 |
assert(cur->on_scavenge_root_list(), "else shouldn't be on this list"); |
|
634 |
cur->clear_scavenge_root_marked(); |
|
635 |
} |
|
636 |
verify_perm_nmethods(f); |
|
637 |
} |
|
638 |
||
639 |
// Temporarily mark nmethods that are claimed to be on the non-perm list. |
|
640 |
void CodeCache::mark_scavenge_root_nmethods() { |
|
26796 | 641 |
NMethodIterator iter; |
642 |
while(iter.next_alive()) { |
|
643 |
nmethod* nm = iter.method(); |
|
644 |
assert(nm->scavenge_root_not_marked(), "clean state"); |
|
645 |
if (nm->on_scavenge_root_list()) |
|
646 |
nm->set_scavenge_root_marked(); |
|
3908 | 647 |
} |
648 |
} |
|
649 |
||
650 |
// If the closure is given, run it on the unlisted nmethods. |
|
651 |
// Also make sure that the effects of mark_scavenge_root_nmethods is gone. |
|
652 |
void CodeCache::verify_perm_nmethods(CodeBlobClosure* f_or_null) { |
|
26796 | 653 |
NMethodIterator iter; |
654 |
while(iter.next_alive()) { |
|
655 |
nmethod* nm = iter.method(); |
|
3908 | 656 |
bool call_f = (f_or_null != NULL); |
26796 | 657 |
assert(nm->scavenge_root_not_marked(), "must be already processed"); |
658 |
if (nm->on_scavenge_root_list()) |
|
659 |
call_f = false; // don't show this one to the client |
|
660 |
nm->verify_scavenge_root_oops(); |
|
661 |
if (call_f) f_or_null->do_code_blob(nm); |
|
3908 | 662 |
} |
663 |
} |
|
664 |
#endif //PRODUCT |
|
665 |
||
25492
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
666 |
void CodeCache::verify_clean_inline_caches() { |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
667 |
#ifdef ASSERT |
26796 | 668 |
NMethodIterator iter; |
669 |
while(iter.next_alive()) { |
|
670 |
nmethod* nm = iter.method(); |
|
671 |
assert(!nm->is_unloaded(), "Tautology"); |
|
672 |
nm->verify_clean_inline_caches(); |
|
673 |
nm->verify(); |
|
25492
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
674 |
} |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
675 |
#endif |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
676 |
} |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
677 |
|
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
678 |
void CodeCache::verify_icholder_relocations() { |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
679 |
#ifdef ASSERT |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
680 |
// make sure that we aren't leaking icholders |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
681 |
int count = 0; |
26796 | 682 |
FOR_ALL_HEAPS(heap) { |
683 |
FOR_ALL_BLOBS(cb, *heap) { |
|
684 |
if (cb->is_nmethod()) { |
|
685 |
nmethod* nm = (nmethod*)cb; |
|
686 |
count += nm->verify_icholder_relocations(); |
|
687 |
} |
|
25492
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
688 |
} |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
689 |
} |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
690 |
|
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
691 |
assert(count + InlineCacheBuffer::pending_icholder_count() + CompiledICHolder::live_not_claimed_count() == |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
692 |
CompiledICHolder::live_count(), "must agree"); |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
693 |
#endif |
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
694 |
} |
4750 | 695 |
|
1 | 696 |
void CodeCache::gc_prologue() { |
697 |
} |
|
698 |
||
699 |
void CodeCache::gc_epilogue() { |
|
700 |
assert_locked_or_safepoint(CodeCache_lock); |
|
26796 | 701 |
NMethodIterator iter; |
702 |
while(iter.next_alive()) { |
|
703 |
nmethod* nm = iter.method(); |
|
704 |
assert(!nm->is_unloaded(), "Tautology"); |
|
705 |
if (needs_cache_clean()) { |
|
706 |
nm->cleanup_inline_caches(); |
|
1 | 707 |
} |
26796 | 708 |
DEBUG_ONLY(nm->verify()); |
709 |
DEBUG_ONLY(nm->verify_oop_relocations()); |
|
1 | 710 |
} |
711 |
set_needs_cache_clean(false); |
|
3908 | 712 |
prune_scavenge_root_nmethods(); |
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
713 |
|
25492
d27050bdfb04
8049421: G1 Class Unloading after completing a concurrent mark cycle
stefank
parents:
25491
diff
changeset
|
714 |
verify_icholder_relocations(); |
1 | 715 |
} |
716 |
||
8724
693c6b883b54
7028374: race in fix_oop_relocations for scavengeable nmethods
never
parents:
8672
diff
changeset
|
717 |
void CodeCache::verify_oops() { |
693c6b883b54
7028374: race in fix_oop_relocations for scavengeable nmethods
never
parents:
8672
diff
changeset
|
718 |
MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); |
693c6b883b54
7028374: race in fix_oop_relocations for scavengeable nmethods
never
parents:
8672
diff
changeset
|
719 |
VerifyOopClosure voc; |
26796 | 720 |
NMethodIterator iter; |
721 |
while(iter.next_alive()) { |
|
722 |
nmethod* nm = iter.method(); |
|
723 |
nm->oops_do(&voc); |
|
724 |
nm->verify_oop_relocations(); |
|
8724
693c6b883b54
7028374: race in fix_oop_relocations for scavengeable nmethods
never
parents:
8672
diff
changeset
|
725 |
} |
693c6b883b54
7028374: race in fix_oop_relocations for scavengeable nmethods
never
parents:
8672
diff
changeset
|
726 |
} |
693c6b883b54
7028374: race in fix_oop_relocations for scavengeable nmethods
never
parents:
8672
diff
changeset
|
727 |
|
26796 | 728 |
size_t CodeCache::capacity() { |
729 |
size_t cap = 0; |
|
730 |
FOR_ALL_HEAPS(heap) { |
|
731 |
cap += (*heap)->capacity(); |
|
732 |
} |
|
733 |
return cap; |
|
1 | 734 |
} |
735 |
||
26942
fa5ea7ff078d
8059390: code cache fills up for bigapps/Weblogic+medrec/nowarnings
zmajo
parents:
26919
diff
changeset
|
736 |
size_t CodeCache::unallocated_capacity(int code_blob_type) { |
fa5ea7ff078d
8059390: code cache fills up for bigapps/Weblogic+medrec/nowarnings
zmajo
parents:
26919
diff
changeset
|
737 |
CodeHeap* heap = get_code_heap(code_blob_type); |
fa5ea7ff078d
8059390: code cache fills up for bigapps/Weblogic+medrec/nowarnings
zmajo
parents:
26919
diff
changeset
|
738 |
return (heap != NULL) ? heap->unallocated_capacity() : 0; |
fa5ea7ff078d
8059390: code cache fills up for bigapps/Weblogic+medrec/nowarnings
zmajo
parents:
26919
diff
changeset
|
739 |
} |
fa5ea7ff078d
8059390: code cache fills up for bigapps/Weblogic+medrec/nowarnings
zmajo
parents:
26919
diff
changeset
|
740 |
|
26796 | 741 |
size_t CodeCache::unallocated_capacity() { |
742 |
size_t unallocated_cap = 0; |
|
743 |
FOR_ALL_HEAPS(heap) { |
|
744 |
unallocated_cap += (*heap)->unallocated_capacity(); |
|
745 |
} |
|
746 |
return unallocated_cap; |
|
747 |
} |
|
1 | 748 |
|
26796 | 749 |
size_t CodeCache::max_capacity() { |
750 |
size_t max_cap = 0; |
|
751 |
FOR_ALL_HEAPS(heap) { |
|
752 |
max_cap += (*heap)->max_capacity(); |
|
753 |
} |
|
754 |
return max_cap; |
|
1 | 755 |
} |
756 |
||
17617
4e330bce1812
8012371: Adjust Tiered compile threshold according to available space in code cache
anoll
parents:
17132
diff
changeset
|
757 |
/** |
26796 | 758 |
* Returns true if a CodeHeap is full and sets code_blob_type accordingly. |
759 |
*/ |
|
760 |
bool CodeCache::is_full(int* code_blob_type) { |
|
761 |
FOR_ALL_HEAPS(heap) { |
|
762 |
if ((*heap)->unallocated_capacity() < CodeCacheMinimumFreeSpace) { |
|
763 |
*code_blob_type = (*heap)->code_blob_type(); |
|
764 |
return true; |
|
765 |
} |
|
766 |
} |
|
767 |
return false; |
|
768 |
} |
|
769 |
||
770 |
/** |
|
771 |
* Returns the reverse free ratio. E.g., if 25% (1/4) of the code heap |
|
17617
4e330bce1812
8012371: Adjust Tiered compile threshold according to available space in code cache
anoll
parents:
17132
diff
changeset
|
772 |
* is free, reverse_free_ratio() returns 4. |
4e330bce1812
8012371: Adjust Tiered compile threshold according to available space in code cache
anoll
parents:
17132
diff
changeset
|
773 |
*/ |
26796 | 774 |
double CodeCache::reverse_free_ratio(int code_blob_type) { |
775 |
CodeHeap* heap = get_code_heap(code_blob_type); |
|
776 |
if (heap == NULL) { |
|
777 |
return 0; |
|
778 |
} |
|
779 |
double unallocated_capacity = (double)(heap->unallocated_capacity() - CodeCacheMinimumFreeSpace); |
|
780 |
double max_capacity = (double)heap->max_capacity(); |
|
17617
4e330bce1812
8012371: Adjust Tiered compile threshold according to available space in code cache
anoll
parents:
17132
diff
changeset
|
781 |
return max_capacity / unallocated_capacity; |
4e330bce1812
8012371: Adjust Tiered compile threshold according to available space in code cache
anoll
parents:
17132
diff
changeset
|
782 |
} |
1 | 783 |
|
26796 | 784 |
size_t CodeCache::bytes_allocated_in_freelists() { |
785 |
size_t allocated_bytes = 0; |
|
786 |
FOR_ALL_HEAPS(heap) { |
|
787 |
allocated_bytes += (*heap)->allocated_in_freelist(); |
|
788 |
} |
|
789 |
return allocated_bytes; |
|
790 |
} |
|
791 |
||
792 |
int CodeCache::allocated_segments() { |
|
793 |
int number_of_segments = 0; |
|
794 |
FOR_ALL_HEAPS(heap) { |
|
795 |
number_of_segments += (*heap)->allocated_segments(); |
|
796 |
} |
|
797 |
return number_of_segments; |
|
798 |
} |
|
799 |
||
800 |
size_t CodeCache::freelists_length() { |
|
801 |
size_t length = 0; |
|
802 |
FOR_ALL_HEAPS(heap) { |
|
803 |
length += (*heap)->freelist_length(); |
|
804 |
} |
|
805 |
return length; |
|
806 |
} |
|
807 |
||
1 | 808 |
void icache_init(); |
809 |
||
810 |
void CodeCache::initialize() { |
|
811 |
assert(CodeCacheSegmentSize >= (uintx)CodeEntryAlignment, "CodeCacheSegmentSize must be large enough to align entry points"); |
|
812 |
#ifdef COMPILER2 |
|
813 |
assert(CodeCacheSegmentSize >= (uintx)OptoLoopAlignment, "CodeCacheSegmentSize must be large enough to align inner loops"); |
|
814 |
#endif |
|
815 |
assert(CodeCacheSegmentSize >= sizeof(jdouble), "CodeCacheSegmentSize must be large enough to align constants"); |
|
816 |
// This was originally just a check of the alignment, causing failure, instead, round |
|
817 |
// the code cache to the page size. In particular, Solaris is moving to a larger |
|
818 |
// default page size. |
|
819 |
CodeCacheExpansionSize = round_to(CodeCacheExpansionSize, os::vm_page_size()); |
|
26796 | 820 |
|
821 |
if (SegmentedCodeCache) { |
|
822 |
// Use multiple code heaps |
|
823 |
initialize_heaps(); |
|
824 |
} else { |
|
825 |
// Use a single code heap |
|
826 |
ReservedCodeSpace rs = reserve_heap_memory(ReservedCodeCacheSize); |
|
26919
361b4b4c92c0
8059468: Fix PrintCodeCache output changed by JDK-8059137
thartmann
parents:
26918
diff
changeset
|
827 |
add_heap(rs, "CodeCache", InitialCodeCacheSize, CodeBlobType::All); |
1 | 828 |
} |
829 |
||
830 |
// Initialize ICache flush mechanism |
|
831 |
// This service is needed for os::register_code_area |
|
832 |
icache_init(); |
|
833 |
||
834 |
// Give OS a chance to register generated code area. |
|
835 |
// This is used on Windows 64 bit platforms to register |
|
836 |
// Structured Exception Handlers for our generated code. |
|
26796 | 837 |
os::register_code_area((char*)low_bound(), (char*)high_bound()); |
1 | 838 |
} |
839 |
||
840 |
void codeCache_init() { |
|
841 |
CodeCache::initialize(); |
|
842 |
} |
|
843 |
||
844 |
//------------------------------------------------------------------------------------------------ |
|
845 |
||
846 |
int CodeCache::number_of_nmethods_with_dependencies() { |
|
847 |
return _number_of_nmethods_with_dependencies; |
|
848 |
} |
|
849 |
||
850 |
void CodeCache::clear_inline_caches() { |
|
851 |
assert_locked_or_safepoint(CodeCache_lock); |
|
26796 | 852 |
NMethodIterator iter; |
853 |
while(iter.next_alive()) { |
|
854 |
iter.method()->clear_inline_caches(); |
|
1 | 855 |
} |
856 |
} |
|
857 |
||
22506
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
858 |
// Keeps track of time spent for checking dependencies |
22921
ee35d5c0b1dc
8034839: jvm hangs with gc/gctests/LoadUnloadGC test
anoll
parents:
22506
diff
changeset
|
859 |
NOT_PRODUCT(static elapsedTimer dependentCheckTime;) |
1 | 860 |
|
861 |
int CodeCache::mark_for_deoptimization(DepChange& changes) { |
|
862 |
MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); |
|
863 |
int number_of_marked_CodeBlobs = 0; |
|
864 |
||
865 |
// search the hierarchy looking for nmethods which are affected by the loading of this class |
|
866 |
||
867 |
// then search the interfaces this class implements looking for nmethods |
|
868 |
// which might be dependent of the fact that an interface only had one |
|
869 |
// implementor. |
|
22506
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
870 |
// nmethod::check_all_dependencies works only correctly, if no safepoint |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
871 |
// can happen |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
872 |
No_Safepoint_Verifier nsv; |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
873 |
for (DepChange::ContextStream str(changes, nsv); str.next(); ) { |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
874 |
Klass* d = str.klass(); |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
875 |
number_of_marked_CodeBlobs += InstanceKlass::cast(d)->mark_dependent_nmethods(changes); |
1 | 876 |
} |
877 |
||
878 |
#ifndef PRODUCT |
|
22506
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
879 |
if (VerifyDependencies) { |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
880 |
// Object pointers are used as unique identifiers for dependency arguments. This |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
881 |
// is only possible if no safepoint, i.e., GC occurs during the verification code. |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
882 |
dependentCheckTime.start(); |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
883 |
nmethod::check_all_dependencies(changes); |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
884 |
dependentCheckTime.stop(); |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
885 |
} |
0759c126204d
7194669: CodeCache::mark_for_deoptimization should avoid verifying dependencies multiple times
anoll
parents:
20290
diff
changeset
|
886 |
#endif |
1 | 887 |
|
888 |
return number_of_marked_CodeBlobs; |
|
889 |
} |
|
890 |
||
891 |
||
892 |
#ifdef HOTSWAP |
|
893 |
int CodeCache::mark_for_evol_deoptimization(instanceKlassHandle dependee) { |
|
894 |
MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); |
|
895 |
int number_of_marked_CodeBlobs = 0; |
|
896 |
||
897 |
// Deoptimize all methods of the evolving class itself |
|
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
898 |
Array<Method*>* old_methods = dependee->methods(); |
1 | 899 |
for (int i = 0; i < old_methods->length(); i++) { |
900 |
ResourceMark rm; |
|
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
901 |
Method* old_method = old_methods->at(i); |
1 | 902 |
nmethod *nm = old_method->code(); |
903 |
if (nm != NULL) { |
|
904 |
nm->mark_for_deoptimization(); |
|
905 |
number_of_marked_CodeBlobs++; |
|
906 |
} |
|
907 |
} |
|
908 |
||
26796 | 909 |
NMethodIterator iter; |
910 |
while(iter.next_alive()) { |
|
911 |
nmethod* nm = iter.method(); |
|
1 | 912 |
if (nm->is_marked_for_deoptimization()) { |
913 |
// ...Already marked in the previous pass; don't count it again. |
|
914 |
} else if (nm->is_evol_dependent_on(dependee())) { |
|
915 |
ResourceMark rm; |
|
916 |
nm->mark_for_deoptimization(); |
|
917 |
number_of_marked_CodeBlobs++; |
|
918 |
} else { |
|
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
919 |
// flush caches in case they refer to a redefined Method* |
1 | 920 |
nm->clear_inline_caches(); |
921 |
} |
|
922 |
} |
|
923 |
||
924 |
return number_of_marked_CodeBlobs; |
|
925 |
} |
|
926 |
#endif // HOTSWAP |
|
927 |
||
928 |
||
929 |
// Deoptimize all methods |
|
930 |
void CodeCache::mark_all_nmethods_for_deoptimization() { |
|
931 |
MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); |
|
26796 | 932 |
NMethodIterator iter; |
933 |
while(iter.next_alive()) { |
|
934 |
nmethod* nm = iter.method(); |
|
26580
8cca7fbe77b8
8056154: JVM crash with EXCEPTION_ACCESS_VIOLATION when there are many threads running
iveresov
parents:
25492
diff
changeset
|
935 |
if (!nm->method()->is_method_handle_intrinsic()) { |
8cca7fbe77b8
8056154: JVM crash with EXCEPTION_ACCESS_VIOLATION when there are many threads running
iveresov
parents:
25492
diff
changeset
|
936 |
nm->mark_for_deoptimization(); |
8cca7fbe77b8
8056154: JVM crash with EXCEPTION_ACCESS_VIOLATION when there are many threads running
iveresov
parents:
25492
diff
changeset
|
937 |
} |
1 | 938 |
} |
939 |
} |
|
940 |
||
13728
882756847a04
6964458: Reimplement class meta-data storage to use native memory
coleenp
parents:
13391
diff
changeset
|
941 |
int CodeCache::mark_for_deoptimization(Method* dependee) { |
1 | 942 |
MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); |
943 |
int number_of_marked_CodeBlobs = 0; |
|
944 |
||
26796 | 945 |
NMethodIterator iter; |
946 |
while(iter.next_alive()) { |
|
947 |
nmethod* nm = iter.method(); |
|
1 | 948 |
if (nm->is_dependent_on_method(dependee)) { |
949 |
ResourceMark rm; |
|
950 |
nm->mark_for_deoptimization(); |
|
951 |
number_of_marked_CodeBlobs++; |
|
952 |
} |
|
953 |
} |
|
954 |
||
955 |
return number_of_marked_CodeBlobs; |
|
956 |
} |
|
957 |
||
958 |
void CodeCache::make_marked_nmethods_zombies() { |
|
959 |
assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint"); |
|
26796 | 960 |
NMethodIterator iter; |
961 |
while(iter.next_alive()) { |
|
962 |
nmethod* nm = iter.method(); |
|
1 | 963 |
if (nm->is_marked_for_deoptimization()) { |
964 |
||
965 |
// If the nmethod has already been made non-entrant and it can be converted |
|
966 |
// then zombie it now. Otherwise make it non-entrant and it will eventually |
|
967 |
// be zombied when it is no longer seen on the stack. Note that the nmethod |
|
968 |
// might be "entrant" and not on the stack and so could be zombied immediately |
|
969 |
// but we can't tell because we don't track it on stack until it becomes |
|
970 |
// non-entrant. |
|
971 |
||
972 |
if (nm->is_not_entrant() && nm->can_not_entrant_be_converted()) { |
|
973 |
nm->make_zombie(); |
|
974 |
} else { |
|
975 |
nm->make_not_entrant(); |
|
976 |
} |
|
977 |
} |
|
978 |
} |
|
979 |
} |
|
980 |
||
981 |
void CodeCache::make_marked_nmethods_not_entrant() { |
|
982 |
assert_locked_or_safepoint(CodeCache_lock); |
|
26796 | 983 |
NMethodIterator iter; |
984 |
while(iter.next_alive()) { |
|
985 |
nmethod* nm = iter.method(); |
|
1 | 986 |
if (nm->is_marked_for_deoptimization()) { |
987 |
nm->make_not_entrant(); |
|
988 |
} |
|
989 |
} |
|
990 |
} |
|
991 |
||
992 |
void CodeCache::verify() { |
|
26796 | 993 |
assert_locked_or_safepoint(CodeCache_lock); |
994 |
FOR_ALL_HEAPS(heap) { |
|
995 |
(*heap)->verify(); |
|
996 |
FOR_ALL_BLOBS(cb, *heap) { |
|
997 |
if (cb->is_alive()) { |
|
998 |
cb->verify(); |
|
999 |
} |
|
1000 |
} |
|
1 | 1001 |
} |
1002 |
} |
|
1003 |
||
26796 | 1004 |
// A CodeHeap is full. Print out warning and report event. |
1005 |
void CodeCache::report_codemem_full(int code_blob_type, bool print) { |
|
1006 |
// Get nmethod heap for the given CodeBlobType and build CodeCacheFull event |
|
26942
fa5ea7ff078d
8059390: code cache fills up for bigapps/Weblogic+medrec/nowarnings
zmajo
parents:
26919
diff
changeset
|
1007 |
CodeHeap* heap = get_code_heap(code_blob_type); |
fa5ea7ff078d
8059390: code cache fills up for bigapps/Weblogic+medrec/nowarnings
zmajo
parents:
26919
diff
changeset
|
1008 |
assert(heap != NULL, "heap is null"); |
26796 | 1009 |
|
1010 |
if (!heap->was_full() || print) { |
|
1011 |
// Not yet reported for this heap, report |
|
1012 |
heap->report_full(); |
|
1013 |
if (SegmentedCodeCache) { |
|
26918
4e1761669426
8059137: MemoryPoolMXBeans for different code heaps should contain 'Code heap' in their names
thartmann
parents:
26809
diff
changeset
|
1014 |
warning("%s is full. Compiler has been disabled.", CodeCache::get_code_heap_name(code_blob_type)); |
26796 | 1015 |
warning("Try increasing the code heap size using -XX:%s=", |
1016 |
(code_blob_type == CodeBlobType::MethodNonProfiled) ? "NonProfiledCodeHeapSize" : "ProfiledCodeHeapSize"); |
|
1017 |
} else { |
|
1018 |
warning("CodeCache is full. Compiler has been disabled."); |
|
1019 |
warning("Try increasing the code cache size using -XX:ReservedCodeCacheSize="); |
|
1020 |
} |
|
1021 |
ResourceMark rm; |
|
1022 |
stringStream s; |
|
1023 |
// Dump code cache into a buffer before locking the tty, |
|
1024 |
{ |
|
1025 |
MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); |
|
1026 |
print_summary(&s); |
|
1027 |
} |
|
1028 |
ttyLocker ttyl; |
|
1029 |
tty->print("%s", s.as_string()); |
|
1030 |
} |
|
1031 |
||
18025 | 1032 |
_codemem_full_count++; |
1033 |
EventCodeCacheFull event; |
|
1034 |
if (event.should_commit()) { |
|
26796 | 1035 |
event.set_codeBlobType((u1)code_blob_type); |
1036 |
event.set_startAddress((u8)heap->low_boundary()); |
|
1037 |
event.set_commitedTopAddress((u8)heap->high()); |
|
1038 |
event.set_reservedTopAddress((u8)heap->high_boundary()); |
|
18025 | 1039 |
event.set_entryCount(nof_blobs()); |
1040 |
event.set_methodCount(nof_nmethods()); |
|
1041 |
event.set_adaptorCount(nof_adapters()); |
|
26796 | 1042 |
event.set_unallocatedCapacity(heap->unallocated_capacity()/K); |
18025 | 1043 |
event.set_fullCount(_codemem_full_count); |
1044 |
event.commit(); |
|
1045 |
} |
|
1046 |
} |
|
1047 |
||
23214
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1048 |
void CodeCache::print_memory_overhead() { |
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1049 |
size_t wasted_bytes = 0; |
26796 | 1050 |
FOR_ALL_HEAPS(heap) { |
1051 |
CodeHeap* curr_heap = *heap; |
|
1052 |
for (CodeBlob* cb = (CodeBlob*)curr_heap->first(); cb != NULL; cb = (CodeBlob*)curr_heap->next(cb)) { |
|
1053 |
HeapBlock* heap_block = ((HeapBlock*)cb) - 1; |
|
1054 |
wasted_bytes += heap_block->length() * CodeCacheSegmentSize - cb->size(); |
|
1055 |
} |
|
23214
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1056 |
} |
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1057 |
// Print bytes that are allocated in the freelist |
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1058 |
ttyLocker ttl; |
26796 | 1059 |
tty->print_cr("Number of elements in freelist: " SSIZE_FORMAT, freelists_length()); |
1060 |
tty->print_cr("Allocated in freelist: " SSIZE_FORMAT "kB", bytes_allocated_in_freelists()/K); |
|
24424
2658d7834c6e
8037816: Fix for 8036122 breaks build with Xcode5/clang
drchase
parents:
23214
diff
changeset
|
1061 |
tty->print_cr("Unused bytes in CodeBlobs: " SSIZE_FORMAT "kB", (wasted_bytes/K)); |
2658d7834c6e
8037816: Fix for 8036122 breaks build with Xcode5/clang
drchase
parents:
23214
diff
changeset
|
1062 |
tty->print_cr("Segment map size: " SSIZE_FORMAT "kB", allocated_segments()/K); // 1 byte per segment |
23214
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1063 |
} |
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1064 |
|
1 | 1065 |
//------------------------------------------------------------------------------------------------ |
1066 |
// Non-product version |
|
1067 |
||
1068 |
#ifndef PRODUCT |
|
1069 |
||
3908 | 1070 |
void CodeCache::print_trace(const char* event, CodeBlob* cb, int size) { |
1071 |
if (PrintCodeCache2) { // Need to add a new flag |
|
1072 |
ResourceMark rm; |
|
1073 |
if (size == 0) size = cb->size(); |
|
24424
2658d7834c6e
8037816: Fix for 8036122 breaks build with Xcode5/clang
drchase
parents:
23214
diff
changeset
|
1074 |
tty->print_cr("CodeCache %s: addr: " INTPTR_FORMAT ", size: 0x%x", event, p2i(cb), size); |
3908 | 1075 |
} |
1076 |
} |
|
1077 |
||
1 | 1078 |
void CodeCache::print_internals() { |
1079 |
int nmethodCount = 0; |
|
1080 |
int runtimeStubCount = 0; |
|
1081 |
int adapterCount = 0; |
|
1082 |
int deoptimizationStubCount = 0; |
|
1083 |
int uncommonTrapStubCount = 0; |
|
1084 |
int bufferBlobCount = 0; |
|
1085 |
int total = 0; |
|
1086 |
int nmethodAlive = 0; |
|
1087 |
int nmethodNotEntrant = 0; |
|
1088 |
int nmethodZombie = 0; |
|
1089 |
int nmethodUnloaded = 0; |
|
1090 |
int nmethodJava = 0; |
|
1091 |
int nmethodNative = 0; |
|
23214
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1092 |
int max_nm_size = 0; |
1 | 1093 |
ResourceMark rm; |
1094 |
||
26796 | 1095 |
int i = 0; |
1096 |
FOR_ALL_HEAPS(heap) { |
|
1097 |
if (SegmentedCodeCache && Verbose) { |
|
26918
4e1761669426
8059137: MemoryPoolMXBeans for different code heaps should contain 'Code heap' in their names
thartmann
parents:
26809
diff
changeset
|
1098 |
tty->print_cr("-- %s --", (*heap)->name()); |
26796 | 1099 |
} |
1100 |
FOR_ALL_BLOBS(cb, *heap) { |
|
1101 |
total++; |
|
1102 |
if (cb->is_nmethod()) { |
|
1103 |
nmethod* nm = (nmethod*)cb; |
|
1 | 1104 |
|
26796 | 1105 |
if (Verbose && nm->method() != NULL) { |
1106 |
ResourceMark rm; |
|
1107 |
char *method_name = nm->method()->name_and_sig_as_C_string(); |
|
1108 |
tty->print("%s", method_name); |
|
1109 |
if(nm->is_alive()) { tty->print_cr(" alive"); } |
|
1110 |
if(nm->is_not_entrant()) { tty->print_cr(" not-entrant"); } |
|
1111 |
if(nm->is_zombie()) { tty->print_cr(" zombie"); } |
|
1112 |
} |
|
1 | 1113 |
|
26796 | 1114 |
nmethodCount++; |
1 | 1115 |
|
26796 | 1116 |
if(nm->is_alive()) { nmethodAlive++; } |
1117 |
if(nm->is_not_entrant()) { nmethodNotEntrant++; } |
|
1118 |
if(nm->is_zombie()) { nmethodZombie++; } |
|
1119 |
if(nm->is_unloaded()) { nmethodUnloaded++; } |
|
1120 |
if(nm->method() != NULL && nm->is_native_method()) { nmethodNative++; } |
|
1 | 1121 |
|
26796 | 1122 |
if(nm->method() != NULL && nm->is_java_method()) { |
1123 |
nmethodJava++; |
|
1124 |
max_nm_size = MAX2(max_nm_size, nm->size()); |
|
1125 |
} |
|
1126 |
} else if (cb->is_runtime_stub()) { |
|
1127 |
runtimeStubCount++; |
|
1128 |
} else if (cb->is_deoptimization_stub()) { |
|
1129 |
deoptimizationStubCount++; |
|
1130 |
} else if (cb->is_uncommon_trap_stub()) { |
|
1131 |
uncommonTrapStubCount++; |
|
1132 |
} else if (cb->is_adapter_blob()) { |
|
1133 |
adapterCount++; |
|
1134 |
} else if (cb->is_buffer_blob()) { |
|
1135 |
bufferBlobCount++; |
|
1 | 1136 |
} |
1137 |
} |
|
1138 |
} |
|
1139 |
||
1140 |
int bucketSize = 512; |
|
23214
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1141 |
int bucketLimit = max_nm_size / bucketSize + 1; |
13195 | 1142 |
int *buckets = NEW_C_HEAP_ARRAY(int, bucketLimit, mtCode); |
23214
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1143 |
memset(buckets, 0, sizeof(int) * bucketLimit); |
1 | 1144 |
|
26796 | 1145 |
NMethodIterator iter; |
1146 |
while(iter.next()) { |
|
1147 |
nmethod* nm = iter.method(); |
|
1148 |
if(nm->method() != NULL && nm->is_java_method()) { |
|
1149 |
buckets[nm->size() / bucketSize]++; |
|
1 | 1150 |
} |
1151 |
} |
|
23214
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1152 |
|
1 | 1153 |
tty->print_cr("Code Cache Entries (total of %d)",total); |
1154 |
tty->print_cr("-------------------------------------------------"); |
|
1155 |
tty->print_cr("nmethods: %d",nmethodCount); |
|
1156 |
tty->print_cr("\talive: %d",nmethodAlive); |
|
1157 |
tty->print_cr("\tnot_entrant: %d",nmethodNotEntrant); |
|
1158 |
tty->print_cr("\tzombie: %d",nmethodZombie); |
|
1159 |
tty->print_cr("\tunloaded: %d",nmethodUnloaded); |
|
1160 |
tty->print_cr("\tjava: %d",nmethodJava); |
|
1161 |
tty->print_cr("\tnative: %d",nmethodNative); |
|
1162 |
tty->print_cr("runtime_stubs: %d",runtimeStubCount); |
|
1163 |
tty->print_cr("adapters: %d",adapterCount); |
|
1164 |
tty->print_cr("buffer blobs: %d",bufferBlobCount); |
|
1165 |
tty->print_cr("deoptimization_stubs: %d",deoptimizationStubCount); |
|
1166 |
tty->print_cr("uncommon_traps: %d",uncommonTrapStubCount); |
|
1167 |
tty->print_cr("\nnmethod size distribution (non-zombie java)"); |
|
1168 |
tty->print_cr("-------------------------------------------------"); |
|
1169 |
||
1170 |
for(int i=0; i<bucketLimit; i++) { |
|
1171 |
if(buckets[i] != 0) { |
|
1172 |
tty->print("%d - %d bytes",i*bucketSize,(i+1)*bucketSize); |
|
1173 |
tty->fill_to(40); |
|
1174 |
tty->print_cr("%d",buckets[i]); |
|
1175 |
} |
|
1176 |
} |
|
1177 |
||
13195 | 1178 |
FREE_C_HEAP_ARRAY(int, buckets, mtCode); |
23214
b6426873cb37
8029799: vm/mlvm/anonloader/stress/oome prints warning: CodeHeap: # of free blocks > 10000
anoll
parents:
22921
diff
changeset
|
1179 |
print_memory_overhead(); |
1 | 1180 |
} |
1181 |
||
15201
f3d755b11424
8005204: Code Cache Reduction: command line options implementation
vladidan
parents:
13878
diff
changeset
|
1182 |
#endif // !PRODUCT |
f3d755b11424
8005204: Code Cache Reduction: command line options implementation
vladidan
parents:
13878
diff
changeset
|
1183 |
|
1 | 1184 |
// Print a code cache summary to tty. In non-product builds with -XX:+Verbose,
// additionally print per-blob-type size statistics split into live and dead
// blobs, and (with -XX:+WizardMode) aggregate OopMap usage.
void CodeCache::print() {
  print_summary(tty);

#ifndef PRODUCT
  if (!Verbose) return;

  CodeBlob_sizes live;
  CodeBlob_sizes dead;

  // Classify every blob in every code heap as live or dead.
  FOR_ALL_HEAPS(heap) {
    FOR_ALL_BLOBS(cb, *heap) {
      if (!cb->is_alive()) {
        dead.add(cb);
      } else {
        live.add(cb);
      }
    }
  }

  tty->print_cr("CodeCache:");
  tty->print_cr("nmethod dependency checking time %fs", dependentCheckTime.seconds());

  if (!live.is_empty()) {
    live.print("live");
  }
  if (!dead.is_empty()) {
    dead.print("dead");
  }

  if (WizardMode) {
    // print the oop_map usage: second walk over all live blobs to total
    // their code size and OopMapSet sizes.
    int code_size = 0;
    int number_of_blobs = 0;
    int number_of_oop_maps = 0;
    int map_size = 0;
    FOR_ALL_HEAPS(heap) {
      FOR_ALL_BLOBS(cb, *heap) {
        if (cb->is_alive()) {
          number_of_blobs++;
          code_size += cb->code_size();
          OopMapSet* set = cb->oop_maps();
          if (set != NULL) {
            number_of_oop_maps += set->size();
            map_size += set->heap_size();
          }
        }
      }
    }
    tty->print_cr("OopMaps");
    tty->print_cr(" #blobs = %d", number_of_blobs);
    tty->print_cr(" code size = %d", code_size);
    tty->print_cr(" #oop_maps = %d", number_of_oop_maps);
    tty->print_cr(" map size = %d", map_size);
  }

#endif // !PRODUCT
}
1241 |
||
15201
f3d755b11424
8005204: Code Cache Reduction: command line options implementation
vladidan
parents:
13878
diff
changeset
|
1242 |
// Print a usage summary for each code heap to 'st': total size, used,
// peak-used, and free, each in Kb. When 'detailed' is true, also print
// each heap's boundary addresses, the global blob counts, and whether
// compilation is currently enabled.
void CodeCache::print_summary(outputStream* st, bool detailed) {
  FOR_ALL_HEAPS(h) {
    CodeHeap* ch = *h;
    const size_t capacity = (ch->high_boundary() - ch->low_boundary());
    // With a segmented cache every heap prints under its own name;
    // otherwise everything is reported as one "CodeCache".
    if (SegmentedCodeCache) {
      st->print("%s:", ch->name());
    } else {
      st->print("CodeCache:");
    }
    st->print_cr(" size=" SIZE_FORMAT "Kb used=" SIZE_FORMAT
                 "Kb max_used=" SIZE_FORMAT "Kb free=" SIZE_FORMAT "Kb",
                 capacity/K, (capacity - ch->unallocated_capacity())/K,
                 ch->max_allocated_capacity()/K, ch->unallocated_capacity()/K);

    if (detailed) {
      st->print_cr(" bounds [" INTPTR_FORMAT ", " INTPTR_FORMAT ", " INTPTR_FORMAT "]",
                   p2i(ch->low_boundary()),
                   p2i(ch->high()),
                   p2i(ch->high_boundary()));
    }
  }

  if (detailed) {
    st->print_cr(" total_blobs=" UINT32_FORMAT " nmethods=" UINT32_FORMAT
                 " adapters=" UINT32_FORMAT,
                 nof_blobs(), nof_nmethods(), nof_adapters());
    // Report the compiler state, distinguishing why it is disabled.
    const char* state;
    if (CompileBroker::should_compile_new_jobs()) {
      state = "enabled";
    } else if (Arguments::mode() == Arguments::_int) {
      state = "disabled (interpreter mode)";
    } else {
      state = "disabled (not enough contiguous free space left)";
    }
    st->print_cr(" compilation: %s", state);
  }
}
8672
26a427ab6f32
7025742: Can not use CodeCache::unallocated_capacity() with fragmented CodeCache
kvn
parents:
7715
diff
changeset
|
1274 |
|
26587 | 1275 |
// Print one "<compile_id> <comp_level> <name> [header, code_begin - code_end]"
// line per alive nmethod to 'st'. Caller must hold the CodeCache_lock or be
// at a safepoint, since this walks the code heaps.
// Fix: inserted the spaces around INTPTR_FORMAT in the format string —
// in C++11 a string literal immediately followed by an identifier
// ("["INTPTR_FORMAT) is parsed as a user-defined-literal suffix and fails
// to compile.
void CodeCache::print_codelist(outputStream* st) {
  assert_locked_or_safepoint(CodeCache_lock);

  NMethodIterator iter;
  while(iter.next_alive()) {
    nmethod* nm = iter.method();
    ResourceMark rm;
    char *method_name = nm->method()->name_and_sig_as_C_string();
    st->print_cr("%d %d %s [" INTPTR_FORMAT ", " INTPTR_FORMAT " - " INTPTR_FORMAT "]",
                 nm->compile_id(), nm->comp_level(), method_name, (intptr_t)nm->header_begin(),
                 (intptr_t)nm->code_begin(), (intptr_t)nm->code_end());
  }
}
|
1288 |
||
1289 |
// Print the detailed code cache summary (per-heap sizes plus boundary
// addresses) to 'st'. Caller must hold the CodeCache_lock or be at a
// safepoint.
void CodeCache::print_layout(outputStream* st) {
  assert_locked_or_safepoint(CodeCache_lock);
  ResourceMark rm;

  print_summary(st, true);
}
|
1295 |
||
8672
26a427ab6f32
7025742: Can not use CodeCache::unallocated_capacity() with fragmented CodeCache
kvn
parents:
7715
diff
changeset
|
1296 |
// Emit code cache statistics as attribute='value' pairs on 'st'
// (presumably consumed by the compilation log writer — confirm callers).
// Note: print(), not print_cr() — no trailing newline; the caller is
// expected to complete the log line/element.
void CodeCache::log_state(outputStream* st) {
  st->print(" total_blobs='" UINT32_FORMAT "' nmethods='" UINT32_FORMAT "'"
            " adapters='" UINT32_FORMAT "' free_code_cache='" SIZE_FORMAT "'",
            nof_blobs(), nof_nmethods(), nof_adapters(),
            unallocated_capacity());
}