 #include "classfile/javaClasses.hpp"
 #include "classfile/systemDictionary.hpp"
 #include "classfile/verifier.hpp"
 #include "classfile/vmSymbols.hpp"
 #include "compiler/compileBroker.hpp"
-#include "gc_implementation/shared/markSweep.inline.hpp"
 #include "gc_interface/collectedHeap.inline.hpp"
 #include "interpreter/oopMapCache.hpp"
 #include "interpreter/rewriter.hpp"
 #include "jvmtifiles/jvmti.h"
-#include "memory/genOopClosures.inline.hpp"
 #include "memory/heapInspection.hpp"
 #include "memory/iterator.inline.hpp"
 #include "memory/metadataFactory.hpp"
 #include "memory/oopFactory.hpp"
 #include "memory/specialized_oop_closures.hpp"
 #include "oops/fieldStreams.hpp"
 #include "oops/instanceClassLoaderKlass.hpp"
-#include "oops/instanceKlass.hpp"
+#include "oops/instanceKlass.inline.hpp"
 #include "oops/instanceMirrorKlass.hpp"
 #include "oops/instanceOop.hpp"
 #include "oops/klass.inline.hpp"
 #include "oops/method.hpp"
 #include "oops/oop.inline.hpp"
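
The include delta above previews the code change below: once the garbage-collection iteration code leaves this file, the direct dependencies on markSweep.inline.hpp and genOopClosures.inline.hpp go away, and the switch from instanceKlass.hpp to instanceKlass.inline.hpp suggests (an inference from this hunk, not something the diff itself states) that the iteration machinery moves into a new inline header.
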
   }
   return false;
 }
 #endif //PRODUCT

-
-// Garbage collection
-
-#ifdef ASSERT
-template <class T> void assert_is_in(T *p) {
-  T heap_oop = oopDesc::load_heap_oop(p);
-  if (!oopDesc::is_null(heap_oop)) {
-    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
-    assert(Universe::heap()->is_in(o), "should be in heap");
-  }
-}
-template <class T> void assert_is_in_closed_subset(T *p) {
-  T heap_oop = oopDesc::load_heap_oop(p);
-  if (!oopDesc::is_null(heap_oop)) {
-    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
-    assert(Universe::heap()->is_in_closed_subset(o),
-           err_msg("should be in closed *p " INTPTR_FORMAT " " INTPTR_FORMAT, (address)p, (address)o));
-  }
-}
-template <class T> void assert_is_in_reserved(T *p) {
-  T heap_oop = oopDesc::load_heap_oop(p);
-  if (!oopDesc::is_null(heap_oop)) {
-    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
-    assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
-  }
-}
-template <class T> void assert_nothing(T *p) {}
-
-#else
-template <class T> void assert_is_in(T *p) {}
-template <class T> void assert_is_in_closed_subset(T *p) {}
-template <class T> void assert_is_in_reserved(T *p) {}
-template <class T> void assert_nothing(T *p) {}
-#endif // ASSERT
-
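
These assert_* helpers exist so the iteration macros below can name an assert_fn unconditionally: in ASSERT (debug) builds each helper loads the possibly-compressed value at *p, decodes it, and range-checks it against the heap, while in product builds every helper collapses to an empty template function, so the checks cost nothing where performance matters.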
|
-//
-// Macros that iterate over areas of oops which are specialized on type of
-// oop pointer either narrow or wide, depending on UseCompressedOops
-//
-// Parameters are:
-//   T         - type of oop to point to (either oop or narrowOop)
-//   start_p   - starting pointer for region to iterate over
-//   count     - number of oops or narrowOops to iterate over
-//   do_oop    - action to perform on each oop (it's arbitrary C code which
-//               makes it more efficient to put in a macro rather than making
-//               it a template function)
-//   assert_fn - assert function which is template function because performance
-//               doesn't matter when enabled.
-#define InstanceKlass_SPECIALIZED_OOP_ITERATE( \
-  T, start_p, count, do_oop, \
-  assert_fn) \
-{ \
-  T* p = (T*)(start_p); \
-  T* const end = p + (count); \
-  while (p < end) { \
-    (assert_fn)(p); \
-    do_oop; \
-    ++p; \
-  } \
-}
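
To make the generated control flow concrete, here is a hand-expanded instantiation of the macro above. The names addr, cnt, and cl are hypothetical and the expansion is illustrative, not code from the patch; note that the do_oop argument conventionally refers to the loop variable p:

    // InstanceKlass_SPECIALIZED_OOP_ITERATE(narrowOop, addr, cnt,
    //                                       cl->do_oop(p), assert_is_in)
    // expands, after preprocessing, to:
    {
      narrowOop* p = (narrowOop*)(addr);
      narrowOop* const end = p + (cnt);
      while (p < end) {
        (assert_is_in)(p);  // empty function in product builds
        cl->do_oop(p);      // the do_oop argument, spliced in verbatim
        ++p;
      }
    }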
|
-
-#define InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE( \
-  T, start_p, count, do_oop, \
-  assert_fn) \
-{ \
-  T* const start = (T*)(start_p); \
-  T*       p     = start + (count); \
-  while (start < p) { \
-    --p; \
-    (assert_fn)(p); \
-    do_oop; \
-  } \
-}
-
-#define InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
-  T, start_p, count, low, high, \
-  do_oop, assert_fn) \
-{ \
-  T* const l = (T*)(low); \
-  T* const h = (T*)(high); \
-  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \
-         mask_bits((intptr_t)h, sizeof(T)-1) == 0, \
-         "bounded region must be properly aligned"); \
-  T* p = (T*)(start_p); \
-  T* end = p + (count); \
-  if (p < l) p = l; \
-  if (end > h) end = h; \
-  while (p < end) { \
-    (assert_fn)(p); \
-    do_oop; \
-    ++p; \
-  } \
-}
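
Two details in the bounded variant are easy to misread. First, the mask_bits assert only checks alignment of the window bounds: with T = narrowOop, sizeof(T)-1 is 3, so the test is "the low three address bits are zero", i.e. 4-byte alignment. Second, the walk is just the plain iteration clipped to [low, high). A standalone sketch of that clipping, with illustrative names that are not from the patch:

    #include <cstddef>

    // Visit the T-typed slots of [start, start + count) that fall inside
    // the window [low, high). Sketch only, assuming this mirrors the macro.
    template <class T, class Visitor>
    void visit_clipped(T* start, std::size_t count, T* low, T* high, Visitor visit) {
      T* p   = start;
      T* end = start + count;
      if (p < low)    p = low;     // clip from below
      if (end > high) end = high;  // clip from above
      while (p < end) {
        visit(p);
        ++p;
      }
    }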
|
-
-
-// The following macros call specialized macros, passing either oop or
-// narrowOop as the specialization type.  These test the UseCompressedOops
-// flag.
-#define InstanceKlass_OOP_MAP_ITERATE(obj, do_oop, assert_fn) \
-{ \
-  /* Compute oopmap block range. The common case \
-     is nonstatic_oop_map_size == 1. */ \
-  OopMapBlock* map           = start_of_nonstatic_oop_maps(); \
-  OopMapBlock* const end_map = map + nonstatic_oop_map_count(); \
-  if (UseCompressedOops) { \
-    while (map < end_map) { \
-      InstanceKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \
-        obj->obj_field_addr<narrowOop>(map->offset()), map->count(), \
-        do_oop, assert_fn) \
-      ++map; \
-    } \
-  } else { \
-    while (map < end_map) { \
-      InstanceKlass_SPECIALIZED_OOP_ITERATE(oop, \
-        obj->obj_field_addr<oop>(map->offset()), map->count(), \
-        do_oop, assert_fn) \
-      ++map; \
-    } \
-  } \
-}
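
A reading aid for the map-iteration macros: an OopMapBlock is an offset/count pair describing one contiguous run of reference fields in an instance, and most classes need exactly one block, which is what the "common case is nonstatic_oop_map_size == 1" comment refers to. A hypothetical layout, for illustration only:

    // class C { int i; long j; Object a; Object b; Object c; }
    // would typically be described by a single OopMapBlock with
    //   offset = byte offset of field 'a' within the instance
    //   count  = 3                     // a, b, c are laid out contiguously
    // so the macro visits exactly &a, &b and &c for each instance of C.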
|
-
-#define InstanceKlass_OOP_MAP_REVERSE_ITERATE(obj, do_oop, assert_fn) \
-{ \
-  OopMapBlock* const start_map = start_of_nonstatic_oop_maps(); \
-  OopMapBlock* map             = start_map + nonstatic_oop_map_count(); \
-  if (UseCompressedOops) { \
-    while (start_map < map) { \
-      --map; \
-      InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(narrowOop, \
-        obj->obj_field_addr<narrowOop>(map->offset()), map->count(), \
-        do_oop, assert_fn) \
-    } \
-  } else { \
-    while (start_map < map) { \
-      --map; \
-      InstanceKlass_SPECIALIZED_OOP_REVERSE_ITERATE(oop, \
-        obj->obj_field_addr<oop>(map->offset()), map->count(), \
-        do_oop, assert_fn) \
-    } \
-  } \
-}
-
-#define InstanceKlass_BOUNDED_OOP_MAP_ITERATE(obj, low, high, do_oop, \
-                                              assert_fn) \
-{ \
-  /* Compute oopmap block range. The common case is \
-     nonstatic_oop_map_size == 1, so we accept the \
-     usually non-existent extra overhead of examining \
-     all the maps. */ \
-  OopMapBlock* map           = start_of_nonstatic_oop_maps(); \
-  OopMapBlock* const end_map = map + nonstatic_oop_map_count(); \
-  if (UseCompressedOops) { \
-    while (map < end_map) { \
-      InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop, \
-        obj->obj_field_addr<narrowOop>(map->offset()), map->count(), \
-        low, high, \
-        do_oop, assert_fn) \
-      ++map; \
-    } \
-  } else { \
-    while (map < end_map) { \
-      InstanceKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop, \
-        obj->obj_field_addr<oop>(map->offset()), map->count(), \
-        low, high, \
-        do_oop, assert_fn) \
-      ++map; \
-    } \
-  } \
-}
-
-void InstanceKlass::oop_follow_contents(oop obj) {
-  assert(obj != NULL, "can't follow the content of NULL object");
-  MarkSweep::follow_klass(obj->klass());
-  InstanceKlass_OOP_MAP_ITERATE( \
-    obj, \
-    MarkSweep::mark_and_push(p), \
-    assert_is_in_closed_subset)
-}
-
-#if INCLUDE_ALL_GCS
-void InstanceKlass::oop_follow_contents(ParCompactionManager* cm,
-                                        oop obj) {
-  assert(obj != NULL, "can't follow the content of NULL object");
-  PSParallelCompact::follow_klass(cm, obj->klass());
-  // Only mark the header and let the scan of the meta-data mark
-  // everything else.
-  InstanceKlass_OOP_MAP_ITERATE( \
-    obj, \
-    PSParallelCompact::mark_and_push(cm, p), \
-    assert_is_in)
-}
-#endif // INCLUDE_ALL_GCS
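
Both oop_follow_contents variants implement the transitive-marking step of their collector: they handle the klass reference in the header via follow_klass, then push every reference field of the instance onto the collector's marking stack with mark_and_push, serial MarkSweep in the first case and the per-thread ParCompactionManager stack in the second.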
|
-
-// closure's do_metadata() method dictates whether the given closure should be
-// applied to the klass ptr in the object header.
-
-#define InstanceKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix) \
- \
-int InstanceKlass::oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \
-  /* header */ \
-  if_do_metadata_checked(closure, nv_suffix) { \
-    closure->do_klass##nv_suffix(obj->klass()); \
-  } \
-  InstanceKlass_OOP_MAP_ITERATE( \
-    obj, \
-    (closure)->do_oop##nv_suffix(p), \
-    assert_is_in_closed_subset) \
-  return size_helper(); \
-}
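
The nv_suffix parameter is HotSpot's device for avoiding virtual dispatch in hot GC loops: the suffix is either empty, so the closure is invoked through the virtual do_oop, or _nv, which binds to a statically dispatched variant. For a hypothetical FooClosure instantiated with the _nv suffix, the macro above generates roughly:

    int InstanceKlass::oop_oop_iterate_nv(oop obj, FooClosure* closure) {
      if_do_metadata_checked(closure, _nv) {  // should the klass pointer be visited?
        closure->do_klass_nv(obj->klass());
      }
      // ... oop-map walk calling closure->do_oop_nv(p) on each reference field ...
      return size_helper();                   // instance size in words
    }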
|
-
-#if INCLUDE_ALL_GCS
-#define InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
- \
-int InstanceKlass::oop_oop_iterate_backwards##nv_suffix(oop obj, \
-                                                        OopClosureType* closure) { \
-  assert_should_ignore_metadata(closure, nv_suffix); \
- \
-  /* instance variables */ \
-  InstanceKlass_OOP_MAP_REVERSE_ITERATE( \
-    obj, \
-    (closure)->do_oop##nv_suffix(p), \
-    assert_is_in_closed_subset) \
-  return size_helper(); \
-}
-#endif // INCLUDE_ALL_GCS
-
-#define InstanceKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix) \
- \
-int InstanceKlass::oop_oop_iterate##nv_suffix##_m(oop obj, \
-                                                  OopClosureType* closure, \
-                                                  MemRegion mr) { \
-  if_do_metadata_checked(closure, nv_suffix) { \
-    if (mr.contains(obj)) { \
-      closure->do_klass##nv_suffix(obj->klass()); \
-    } \
-  } \
-  InstanceKlass_BOUNDED_OOP_MAP_ITERATE( \
-    obj, mr.start(), mr.end(), \
-    (closure)->do_oop##nv_suffix(p), \
-    assert_is_in_closed_subset) \
-  return size_helper(); \
-}
-
-ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN)
-ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN)
-ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
-ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_DEFN_m)
-#if INCLUDE_ALL_GCS
-ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
-ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
-#endif // INCLUDE_ALL_GCS
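
ALL_OOP_OOP_ITERATE_CLOSURES_1 and _2 come from specialized_oop_closures.hpp, included near the top of this file: each applies its argument macro once per concrete closure type, which is how the three definition macros above fan out into the whole family of specialized oop_oop_iterate* methods. Schematically (a simplified sketch of the mechanism, not a quote of the real, longer lists):

    #define ALL_OOP_OOP_ITERATE_CLOSURES_1(f) \
      f(ScanClosure,_nv)                      \
      f(FastScanClosure,_nv)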
|
-
-int InstanceKlass::oop_adjust_pointers(oop obj) {
-  int size = size_helper();
-  InstanceKlass_OOP_MAP_ITERATE( \
-    obj, \
-    MarkSweep::adjust_pointer(p), \
-    assert_is_in)
-  return size;
-}
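
oop_adjust_pointers belongs to the compaction phase of the serial mark-sweep collector: by the time it runs, every live object's destination has been computed, so MarkSweep::adjust_pointer(p) rewrites the reference at p to its referent's post-compaction address, and the returned word size lets the caller step linearly to the next object in the heap.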
|
-
-#if INCLUDE_ALL_GCS
-void InstanceKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
-  InstanceKlass_OOP_MAP_REVERSE_ITERATE( \
-    obj, \
-    if (PSScavenge::should_scavenge(p)) { \
-      pm->claim_or_forward_depth(p); \
-    }, \
-    assert_nothing )
-}
-
-int InstanceKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
-  int size = size_helper();
-  InstanceKlass_OOP_MAP_ITERATE( \
-    obj, \
-    PSParallelCompact::adjust_pointer(p), \
-    assert_is_in)
-  return size;
-}
-
-#endif // INCLUDE_ALL_GCS
-
 void InstanceKlass::clean_implementors_list(BoolObjectClosure* is_alive) {
   assert(class_loader_data()->is_alive(is_alive), "this klass should be live");
   if (is_interface()) {
     if (ClassUnloading) {
       Klass* impl = implementor();